diff --git a/pyproject.toml b/pyproject.toml
index b89faac..a85431e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ name = "tile2net"
 version = "0.4.0"
 description = "Pedestrian networks from aerial imagery tiles"
 readme = "README.md"
-requires-python = ">=3.10"
+requires-python = ">=3.10,<3.12"
 license = { file = "LICENSE" }
 keywords = [
     "urban-analytics",
diff --git a/requirements-dev.txt b/requirements-dev.txt
index fca002f..c86ffb1 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,9 +1,8 @@
 centerline
 jupyter
-geopandas
+geopandas>=1.0
 geopy
 matplotlib
-momepy
 numba
 opencv-python
 osmnx
@@ -37,3 +36,4 @@ nbsphinx
 pandoc
 sphinx-autobuild
 sphinx-copybutton
+numpy<2.0
\ No newline at end of file
diff --git a/src/tile2net/raster/geocode.py b/src/tile2net/raster/geocode.py
index 4559d1b..9c75c5b 100644
--- a/src/tile2net/raster/geocode.py
+++ b/src/tile2net/raster/geocode.py
@@ -1,8 +1,9 @@
 from __future__ import annotations
+from functools import cached_property
 
 import functools
 import os.path
-from functools import cached_property
+
 
 from pathlib import Path
 from typing import *
diff --git a/src/tile2net/raster/grid.py b/src/tile2net/raster/grid.py
index c8d0127..6397682 100644
--- a/src/tile2net/raster/grid.py
+++ b/src/tile2net/raster/grid.py
@@ -15,6 +15,7 @@ from tile2net.raster.tile_utils.topology import fill_holes, replace_convexhull
 from concurrent.futures import ThreadPoolExecutor, Future, as_completed
+import shutil
 
 
 os.environ['USE_PYGEOS'] = '0'
 import geopandas as gpd
@@ -644,8 +645,10 @@ def save_ntw_polygon(self, crs_metric: int = 3857):
         simplified.to_crs(self.crs, inplace=True)
         self.ntw_poly = simplified
 
-        simplified.to_file(
-            os.path.join(poly_fold, f'{self.name}-Polygons-{datetime.datetime.now().strftime("%d-%m-%Y_%H")}'))
+        path = os.path.join(poly_fold, f'{self.name}-Polygons-{datetime.datetime.now().strftime("%d-%m-%Y_%H_%M")}')
+        if os.path.exists(path):
+            shutil.rmtree(path)
+        simplified.to_file(path)
         logging.info('Polygons are generated and saved!')
 
     def save_ntw_polygons(
@@ -681,7 +684,12 @@ def save_ntw_polygons(
         simplified.to_crs(self.crs, inplace=True)
         self.ntw_poly = simplified
 
-        path = os.path.join(poly_fold, f'{self.name}-Polygons-{datetime.datetime.now().strftime("%d-%m-%Y_%H")}')
+        path = os.path.join(
+            poly_fold,
+            f'{self.name}-Polygons-{datetime.datetime.now().strftime("%d-%m-%Y_%H_%M")}'
+        )
+        if os.path.exists(path):
+            shutil.rmtree(path)
         simplified.to_file(path)
         logging.info('Polygons are generated and saved!')
 
diff --git a/src/tile2net/raster/pednet.py b/src/tile2net/raster/pednet.py
index 0b01b9c..c68bacd 100644
--- a/src/tile2net/raster/pednet.py
+++ b/src/tile2net/raster/pednet.py
@@ -1,5 +1,7 @@
 import logging
 import datetime
+import shutil
+
 import pandas as pd
 import os
 
@@ -300,7 +302,7 @@ def create_sidewalks(self):
         # swntw.geometry = swntw.simplify(0.6)
         sw_modif_uni = gpd.GeoDataFrame(
-                geometry=gpd.GeoSeries([geom for geom in swntw.unary_union.geoms]))
+            geometry=gpd.GeoSeries([geom for geom in swntw.unary_union.geoms]))
         sw_modif_uni_met = set_gdf_crs(sw_modif_uni, 3857)
         sw_uni_lines = sw_modif_uni_met.explode()
         sw_uni_lines.reset_index(drop=True, inplace=True)
 
@@ -412,8 +414,8 @@ def convert_whole_poly2line(self):
 
         for k, v in indcwnear:
             island_lines.append(
-                shapely.shortest_line(self.island.geometry.values[v],
-                                      pdfb.geometry.values[k]))
+                shapely.shortest_line(self.island.geometry.values[v],
+                    pdfb.geometry.values[k]))
 
         island = gpd.GeoDataFrame(geometry=island_lines)
 
@@ -435,9 +437,9 @@ def convert_whole_poly2line(self):
 
         path = self.project.network.path
        path.mkdir(parents=True, exist_ok=True)
-        path = path.joinpath(
-            f'{self.project.name}-Network-{datetime.datetime.now().strftime("%d-%m-%Y_%H")}'
-        )
+        path = path.joinpath(f'{self.project.name}-Network-{datetime.datetime.now().strftime("%d-%m-%Y_%H_%M")}')
+        if os.path.exists(path):
+            shutil.rmtree(path)
         combined.to_file(path)
         self.complete_net = combined
 
diff --git a/src/tile2net/raster/source.py b/src/tile2net/raster/source.py
index f186662..0b234a5 100644
--- a/src/tile2net/raster/source.py
+++ b/src/tile2net/raster/source.py
@@ -114,7 +114,10 @@ def __getitem__(
         matches: GeoSeries = SourceMeta.coverage.geometry
         geocode = GeoCode.from_inferred(item)
         loc = matches.intersects(geocode.polygon)
-        if not loc.any():
+        if (
+            not loc.any()
+            and 'address' in geocode.__dict__
+        ):
             # user must've been lazy; compute a new polygon
             del geocode.address
             _ = geocode.address
@@ -140,7 +143,10 @@ def __getitem__(
                 )
                 loc.append(append)
 
-            if not any(loc):
+            if (
+                not any(loc)
+                and 'address' in geocode.__dict__
+            ):
                 # user must've been lazy; compute a new address
                 loc = []
                 del geocode.address
@@ -159,6 +165,8 @@ def __getitem__(
 
             if any(loc):
                 matches = matches.loc[loc]
+            elif 'address' not in geocode.__dict__:
+                raise SourceNotFound
             else:
                 logger.warning(
                     f'No keyword matches found for {item=} using '
@@ -188,18 +196,14 @@ def __getitem__(
             raise TypeError(f'Invalid type {type(item)} for {item}')
         return source()
 
-    def __init__(self: Type[Source], name, bases, attrs, **kwargs):
-        # super(type(self), self).__init__(name, bases, attrs, **kwargs)
-        super().__init__(name, bases, attrs)
-        if (
-            ABC not in bases
-            and kwargs.get('init', True)
-        ):
-            if self.name is None:
-                raise ValueError(f'{self} must have a name')
-            if self.name in self.catalog:
-                raise ValueError(f'{self} name already in use')
-            self.catalog[self.name] = self
+    # def __init__(self: Type[Source], name, bases, attrs, **kwargs):
+    #     super().__init__(name, bases, attrs)
+    #     if not getattr(self, 'ignore', False):
+    #         if self.name is None:
+    #             raise ValueError(f'{self} must have a name')
+    #         if self.name in self.catalog:
+    #             raise ValueError(f'{self} name already in use')
+    #         self.catalog[self.name] = self
 
 
 class Source(ABC, metaclass=SourceMeta):
@@ -229,8 +233,16 @@ def __str__(self):
         return self.name
 
     def __init_subclass__(cls, **kwargs):
-        # complains if gets kwargs
         super().__init_subclass__()
+        if (
+            not getattr(cls, 'ignore', False)
+            and ABC not in cls.__bases__
+        ):
+            if cls.name is None:
+                raise ValueError(f'{cls} must have a name')
+            if cls.name in cls.catalog:
+                raise ValueError(f'{cls} name already in use')
+            cls.catalog[cls.name] = cls
 
     def __eq__(self, other):
         if (
@@ -368,6 +380,7 @@ class KingCountyWashington(ArcGis):
 
 
 class WashingtonDC(ArcGis):
+    # ignore = True
     server = 'https://imagery.dcgis.dc.gov/dcgis/rest/services/Ortho/Ortho_2021/ImageServer'
     name = 'dc'
     tilesize = 512
@@ -402,16 +415,17 @@ class LosAngeles(ArcGis):
     # raise NotImplementedError
 
 
-# class WestOregon(ArcGis, init=False):
 # class WestOregon(ArcGis):
+#     ignore = True
 #     server = 'https://imagery.oregonexplorer.info/arcgis/rest/services/OSIP_2018/OSIP_2018_WM/ImageServer'
 #     name = 'w_or'
 #     extension = 'jpeg'
 #     keyword = 'Oregon'
 #     # todo: ssl incorrectly configured; come back later
 #
-# # class EastOregon(ArcGis, init=False):
+# class EastOregon(ArcGis, init=False):
+#     ignore = True
 # #     server = 'https://imagery.oregonexplorer.info/arcgis/rest/services/OSIP_2017/OSIP_2017_WM/ImageServer'
 #     name = 'e_or'
 
 
diff --git a/src/tile2net/raster/tile_utils/momepy_shapes.py b/src/tile2net/raster/tile_utils/momepy_shapes.py
index 18c19ba..7f0becd 100644
--- a/src/tile2net/raster/tile_utils/momepy_shapes.py
+++ b/src/tile2net/raster/tile_utils/momepy_shapes.py
@@ -57,7 +57,6 @@
     "Elongation",
     "CentroidCorners",
     "Linearity",
-    "CompactnessWeightedAxis",
 ]
 
 
@@ -1306,82 +1305,3 @@ def _dist(self, a, b):
         return math.hypot(b[0] - a[0], b[1] - a[1])
 
 
-class CompactnessWeightedAxis:
-    """
-    Calculates the compactness-weighted axis of each object in a given GeoDataFrame.
-    Initially designed for blocks.
-
-    .. math::
-        d_{i} \\times\\left(\\frac{4}{\\pi}-\\frac{16 (area_{i})}
-        {perimeter_{i}^{2}}\\right)
-
-    Parameters
-    ----------
-    gdf : GeoDataFrame
-        A GeoDataFrame containing objects.
-    areas : str, list, np.array, pd.Series (default None)
-        The name of the dataframe column, ``np.array``, or ``pd.Series`` where
-        area value are stored . If set to ``None``, this function will calculate areas
-        during the process without saving them separately.
-    perimeters : str, list, np.array, pd.Series (default None)
-        The name of the dataframe column, ``np.array``, or ``pd.Series`` where
-        perimeter values are stored. If set to ``None``, this function will calculate
-        perimeters during the process without saving them separately.
-    longest_axis : str, list, np.array, pd.Series (default None)
-        The name of the dataframe column, ``np.array``, or ``pd.Series`` where
-        longest axis length values are stored. If set to ``None``, this function will
-        calculate longest axis lengths during the process without saving them
-        separately.
-
-    Attributes
-    ----------
-    series : Series
-        A Series containing resulting values
-    gdf : GeoDataFrame
-        The original GeoDataFrame.
-    areas : Series
-        A Series containing used area values.
-    longest_axis : Series
-        A Series containing used area values.
-    perimeters : Series
-        A Series containing used area values.
-
-    Examples
-    --------
-    >>> blocks_df['cwa'] = mm.CompactnessWeightedAxis(blocks_df).series
-    """
-
-    def __init__(self, gdf, areas=None, perimeters=None, longest_axis=None):
-        self.gdf = gdf
-        gdf = gdf.copy()
-
-        if perimeters is None:
-            gdf["mm_p"] = gdf.geometry.length
-            perimeters = "mm_p"
-        else:
-            if not isinstance(perimeters, str):
-                gdf["mm_p"] = perimeters
-                perimeters = "mm_p"
-        self.perimeters = gdf[perimeters]
-        if longest_axis is None:
-            from momepy.dimension import LongestAxisLength
-
-            gdf["mm_la"] = LongestAxisLength(gdf).series
-            longest_axis = "mm_la"
-        else:
-            if not isinstance(longest_axis, str):
-                gdf["mm_la"] = longest_axis
-                longest_axis = "mm_la"
-        self.longest_axis = gdf[longest_axis]
-        if areas is None:
-            areas = gdf.geometry.area
-        if not isinstance(areas, str):
-            gdf["mm_a"] = areas
-            areas = "mm_a"
-        self.areas = gdf[areas]
-        self.series = pd.Series(
-            gdf[longest_axis]
-            * ((4 / np.pi) - (16 * gdf[areas]) / ((gdf[perimeters]) ** 2)),
-            index=gdf.index,
-        )
-
diff --git a/src/tile2net/raster/tile_utils/topology.py b/src/tile2net/raster/tile_utils/topology.py
index c9627d2..e8ba1d8 100644
--- a/src/tile2net/raster/tile_utils/topology.py
+++ b/src/tile2net/raster/tile_utils/topology.py
@@ -932,7 +932,7 @@ def extend_lines(gdf, tolerance, target=None, barrier=None, extension=0):
     points = vectorize_points(np.unique(coords[edges], axis=0))
 
     # query LineString geometry to identify points intersecting 2 geometries
-    inp, res = df.sindex.query_bulk(geo2geodf(points).geometry, predicate="intersects")
+    inp, res = df.sindex.query(geo2geodf(points).geometry, predicate="intersects")
     unique, counts = np.unique(inp, return_counts=True)
     ends = np.unique(res[np.isin(inp, unique[counts == 1])])
 
diff --git a/src/tile2net/raster/util.py b/src/tile2net/raster/util.py
index 6abdf91..9a1ce1b 100644
--- a/src/tile2net/raster/util.py
+++ b/src/tile2net/raster/util.py
@@ -146,6 +146,7 @@ def name_from_location(location: str | list[float, str]):
         return name
     raise TypeError(f"location must be str or list, not {type(location)}")
 
+
 if __name__ == '__main__':
     print(name_from_location('New York, NY, USA'))
     print(name_from_location([1.22456789, 2.3456789, 3.456789, 4.56789]))
diff --git a/src/tile2net/tileseg/inference/inference.py b/src/tile2net/tileseg/inference/inference.py
index fd61155..f3819ca 100644
--- a/src/tile2net/tileseg/inference/inference.py
+++ b/src/tile2net/tileseg/inference/inference.py
@@ -510,10 +510,6 @@ def inference(args: Namespace):
     return inference.inference()
 
 
-def func( *args, **kwargs ):
-    ...
-
-
 if __name__ == '__main__':
     """
     --city_info /tmp/tile2net/washington_square_park/tiles/washington_square_park_256_info.json --interactive --dump_percent 10
diff --git a/src/tile2net/tileseg/utils/trnval_utils.py b/src/tile2net/tileseg/utils/trnval_utils.py
index 55b2c66..8fa1bbe 100644
--- a/src/tile2net/tileseg/utils/trnval_utils.py
+++ b/src/tile2net/tileseg/utils/trnval_utils.py
@@ -296,7 +296,12 @@ def validate_topn(val_loader, net, criterion, optim, epoch, args, dump_assets=Tr
                 output_data = torch.nn.functional.softmax(output, dim=1).cpu().data
                 op = output_data.cpu().detach().numpy()
-                np.save(f'{cfg.RESULT_DIR}/output_{epoch}_{val_idx}.npy', op)
+                path = os.path.join(
+                    cfg.RESULT_DIR,
+                    f'output_{epoch}_{val_idx}.npy'
+                )
+                os.makedirs(cfg.RESULT_DIR, exist_ok=True)
+                np.save(path, op)
                 prob_mask, predictions = output_data.max(1)
 
                 #define assests based on the eval_minibatch function
                 assets = {}
diff --git a/tests/test_remote.py b/tests/test_remote.py
index 51738de..d7402a7 100644
--- a/tests/test_remote.py
+++ b/tests/test_remote.py
@@ -1,5 +1,6 @@
 import abc
 
+import pytest
 import tile2net.raster.source
 from tile2net.raster.raster import Raster
 
@@ -17,6 +18,7 @@ def test_small():
     raster.generate(2)
     raster.inference('--remote', '--debug')
 
+
 def test_sources():
     import tile2net.raster.source as source
     from tile2net.raster.source import Source
@@ -26,10 +28,12 @@ def test_sources():
             not isinstance(cls, type)
             or not issubclass(cls, Source)
             or abc.ABC in cls.__bases__
+            or getattr(cls, 'ignore', False)
         ):
             continue
         # assert querying by the polygon returns the same source
-        assert Source[cls.coverage.unary_union] == cls
+        # assert Source[cls.coverage.unary_union] == cls
+        assert Source[cls.coverage.union_all()] == cls
         # assert querying by the keyword returns the same source
         if isinstance(cls.keyword, str):
             assert Source[cls.keyword] == cls
@@ -83,7 +87,21 @@ def test_sources():
     assert Source['Maywood, California'] == source.LosAngeles
     assert Source['Maywood, CA'] == source.LosAngeles
 
+    # namibia
+    assert Source[-15.49933207, 28.203229539, -15.338660813, 28.358324353] == None
+    # beijing
+    assert Source[39.525834067367256, 116.21383162969653, 39.582584, 116.292915] == None
+    # ocean
+    assert Source[-37.15612782594927, 64.98947402062927, -37.15612782594927, 64.98947402062927] == None
+    # greenland
+    assert Source[73.59343881883807, -51.62165778082543, 73.59343881883807, -51.62165778082543] == None
+    # russia
+    assert Source[54.998172689668486, 36.68930259694381, 55.00000000000001, 36.69112990727614] == None
+    # algeria
+    assert Source[25.06763435341293, -0.7971811600872423, 25.06763435341293, -0.7971811600872423] == None
+    # gulf of mexico
+    assert Source[21.82528963135751, -93.76345422053639, 21.82528963135751, -93.76345422053639] == None
+
 if __name__ == '__main__':
-    test_geocode()
     test_small()
     test_sources()