# Dataset metadata:
#   Modalities: Geospatial
#   License: (not specified in source)
import os | |
import zipfile | |
import geopandas as gpd | |
import pandas as pd | |
from tqdm import tqdm # For progress bars | |
import warnings | |
import multiprocessing as mp | |
import sys # Import the sys module | |
# Ignore specific warnings | |
warnings.filterwarnings("ignore", category=RuntimeWarning, | |
message="driver GML does not support open option DRIVER") | |
warnings.filterwarnings("ignore", category=RuntimeWarning, | |
message="Non closed ring detected. To avoid accepting it, set the OGR_GEOMETRY_ACCEPT_UNCLOSED_RING configuration option to NO") | |
def _zip_members(zf):
    """Return the names of the ``.zip`` members inside an open ZipFile."""
    return [info.filename for info in zf.filelist if info.filename.endswith('.zip')]


def _read_gml_layer(commune_zip, gml_name, kind, commune_zip_name, collector):
    """Read one GML member of an open commune zip and append it to *collector*.

    *kind* is 'commune' or 'parcel' and is only used in the error message.
    Read errors are reported and swallowed so one bad file does not abort
    the whole region.
    """
    try:
        collector.append(gpd.read_file(commune_zip.open(gml_name), driver='GML'))
    except Exception as e:
        print(f"Error reading {kind} GML {gml_name} from {commune_zip_name}: {e}")


def _save_layer(frames, layer, label, region_name, output_dir):
    """Concatenate per-commune GeoDataFrames and write one GeoParquet layer.

    Parameters
    ----------
    frames : list of GeoDataFrame
        Accumulated per-commune frames; nothing is written when empty.
    layer : str
        Plural layer name used in the output filename ('communes'/'parcels').
    label : str
        Singular name used in warning messages ('commune'/'parcel').
    region_name : str
        Region identifier used as filename prefix.
    output_dir : str
        Destination directory for the ``.geoparquet`` file.
    """
    if not frames:
        return
    gdf = gpd.GeoDataFrame(pd.concat(frames, ignore_index=True))
    # pd.concat drops the CRS; restore it from the first frame when available.
    if getattr(frames[0], 'crs', None):
        try:
            gdf.crs = frames[0].crs
        except AttributeError as e:
            print(f"Could not set CRS: {e}")
    else:
        print("WARNING: CRS information is missing from the input data.")
    # Parquet needs homogeneous column types: make each non-geometry column
    # numeric when possible, otherwise force it to string.
    # BUG FIX: 'msGeometry' must be skipped too — the original stringified it
    # here and then tried to use it as the geometry column below.
    for col in gdf.columns:
        if col in ('geometry', 'msGeometry'):
            continue
        try:
            gdf[col] = pd.to_numeric(gdf[col], errors='raise')
        except (ValueError, TypeError):
            gdf[col] = gdf[col].astype(str)
    # Make the active geometry column explicit (GML layers may expose it
    # as 'msGeometry' instead of the default 'geometry').
    if 'msGeometry' in gdf.columns:
        gdf = gdf.set_geometry('msGeometry')
    elif 'geometry' in gdf.columns:
        gdf = gdf.set_geometry('geometry')
    else:
        print(
            f"WARNING: No 'geometry' or 'msGeometry' column found in {label} data. Spatial operations will not work.")
    gdf.to_parquet(os.path.join(output_dir, f"{region_name}_{layer}.geoparquet"),
                   compression='gzip')
    print(
        f"Successfully saved {region_name} {layer} to {output_dir}/{region_name}_{layer}.geoparquet")


def process_region(region_zip_path, output_dir):
    """Process a single region zip file to extract and save commune and parcel data.

    The region archive contains per-city zips, which contain per-commune
    zips, which hold the GML layers ('*_map.gml' = commune boundaries,
    '*_ple.gml' = parcels). All communes and parcels of the region are
    concatenated and written as two gzip-compressed GeoParquet files in
    *output_dir*. Errors are reported per archive/file and never propagate.
    """
    region_name = os.path.basename(region_zip_path).replace(".zip", "")
    all_communes = []
    all_parcels = []
    try:
        with zipfile.ZipFile(region_zip_path, 'r') as region_zip:
            for city_zip_name in _zip_members(region_zip):
                try:
                    # 'with' closes the inner file handles the original leaked.
                    with region_zip.open(city_zip_name) as city_fh, \
                            zipfile.ZipFile(city_fh, 'r') as city_zip:
                        for commune_zip_name in _zip_members(city_zip):
                            try:
                                with city_zip.open(commune_zip_name) as commune_fh, \
                                        zipfile.ZipFile(commune_fh, 'r') as commune_zip:
                                    gml_files = [f.filename for f in commune_zip.filelist
                                                 if f.filename.endswith('.gml')]
                                    commune_gml = next((f for f in gml_files if '_map.gml' in f), None)
                                    parcel_gml = next((f for f in gml_files if '_ple.gml' in f), None)
                                    if commune_gml:
                                        _read_gml_layer(commune_zip, commune_gml, 'commune',
                                                        commune_zip_name, all_communes)
                                    if parcel_gml:
                                        _read_gml_layer(commune_zip, parcel_gml, 'parcel',
                                                        commune_zip_name, all_parcels)
                            except zipfile.BadZipFile as e:
                                print(f"Bad Zip file encountered: {commune_zip_name} - {e}")
                            except Exception as e:
                                print(f"Error processing {commune_zip_name}: {e}")
                except zipfile.BadZipFile as e:
                    print(f"Bad Zip file encountered: {city_zip_name} - {e}")
                except Exception as e:
                    print(f"Error processing {city_zip_name}: {e}")
    # BUG FIX: the original referenced the undefined name 'region_zip_name'
    # here, turning any region-level failure into a NameError.
    except zipfile.BadZipFile as e:
        print(f"Bad Zip file encountered: {region_zip_path} - {e}")
    except Exception as e:
        print(f"Error processing {region_zip_path}: {e}")
    # Concatenate and save both layers for the region.
    try:
        _save_layer(all_communes, 'communes', 'commune', region_name, output_dir)
        _save_layer(all_parcels, 'parcels', 'parcel', region_name, output_dir)
    except Exception as e:
        print(f"Error saving GeoParquet files for {region_name}: {e}")
def _process_region_star(args):
    """Unpack one (region_zip_path, output_dir) tuple for pool.imap_unordered."""
    return process_region(*args)


def process_italy_data_unzipped_parallel(root_dir, output_dir, num_processes=None):
    """Process every region zip found in *root_dir* in parallel.

    Parameters
    ----------
    root_dir : str
        Directory containing one ``.zip`` archive per region.
    output_dir : str
        Directory where GeoParquet outputs are written (created if needed).
    num_processes : int, optional
        Worker count; defaults to ``mp.cpu_count()`` (resolved at call time,
        not at definition time as in the original).

    Notes
    -----
    The original hid the pool behind an ``if __name__ == '__main__'`` check
    *inside* this function, so it silently did nothing when the module was
    imported. That guard belongs at the call site instead.
    """
    if num_processes is None:
        num_processes = mp.cpu_count()
    os.makedirs(output_dir, exist_ok=True)
    region_zip_paths = [os.path.join(root_dir, f)
                        for f in os.listdir(root_dir) if f.endswith('.zip')]
    # For macOS, force the 'spawn' start method for consistent behavior.
    if sys.platform == 'darwin':
        try:
            mp.set_start_method('spawn')
        except RuntimeError:
            pass  # start method was already set; keep it
    work_items = [(path, output_dir) for path in region_zip_paths]
    with mp.Pool(processes=num_processes) as pool:
        # imap_unordered yields results as workers finish, so tqdm shows real
        # progress; starmap blocked until everything was done, making the bar
        # jump from 0 to 100% in one step.
        for _ in tqdm(pool.imap_unordered(_process_region_star, work_items),
                      total=len(work_items), desc="Overall Progress: Regions"):
            pass
# Example Usage. The __main__ guard is required so that 'spawn'-based
# multiprocessing workers can re-import this module without re-running the
# whole pipeline recursively.
if __name__ == '__main__':
    root_dir = "ITALIA"  # Path to the ITALIA directory
    output_dir = "output"  # Path to save the GeoParquet files
    num_processes = mp.cpu_count()  # Use all available CPU cores
    process_italy_data_unzipped_parallel(root_dir, output_dir, num_processes)