zhangnaiwen 1 year ago
Parent
Commit
a7495472ed
60 changed files with 6583 additions and 0 deletions
  1. bin/worker.py (+0 -0)
  2. src/app/api/mission.py (+70 -0)
  3. src/app/mission_jobs/__init__.py (+0 -0)
  4. src/app/mission_jobs/image_slice_job.py (+30 -0)
  5. src/app/utils/create_geojsonl.py (+236 -0)
  6. src/application.py (+30 -0)
  7. src/connection.py (+59 -0)
  8. src/mission/__init__.py (+0 -0)
  9. src/mission/image_slice.py (+80 -0)
  10. src/slice.py (+239 -0)
  11. src/starearth/__init__.py (+34 -0)
  12. src/starearth/bound.py (+188 -0)
  13. src/starearth/cache.py (+87 -0)
  14. src/starearth/cog_convert.py (+40 -0)
  15. src/starearth/colormap.py (+127 -0)
  16. src/starearth/coordinate.py (+97 -0)
  17. src/starearth/datatype.py (+106 -0)
  18. src/starearth/filename.py (+50 -0)
  19. src/starearth/filesystem/__init__.py (+50 -0)
  20. src/starearth/filesystem/cache.py (+69 -0)
  21. src/starearth/filesystem/filesystem.py (+52 -0)
  22. src/starearth/grid/__init__.py (+35 -0)
  23. src/starearth/grid/ge.py (+109 -0)
  24. src/starearth/grid/grid.py (+59 -0)
  25. src/starearth/grid/mct.py (+97 -0)
  26. src/starearth/grid/sm.py (+46 -0)
  27. src/starearth/nodata.py (+30 -0)
  28. src/starearth/output/__init__.py (+0 -0)
  29. src/starearth/output/bil.py (+68 -0)
  30. src/starearth/output/jpeg.py (+81 -0)
  31. src/starearth/output/output.py (+42 -0)
  32. src/starearth/output/png.py (+70 -0)
  33. src/starearth/output/raw.py (+91 -0)
  34. src/starearth/renderer/__init__.py (+0 -0)
  35. src/starearth/renderer/greyscale.py (+79 -0)
  36. src/starearth/renderer/mapbox_rgb.py (+98 -0)
  37. src/starearth/renderer/rgb.py (+57 -0)
  38. src/starearth/scene.py (+322 -0)
  39. src/starearth/sheet.py (+79 -0)
  40. src/starearth/shp_to_geojsonl.py (+204 -0)
  41. src/starearth/slicer.py (+319 -0)
  42. src/starearth/storage/__init__.py (+0 -0)
  43. src/starearth/storage/arcgis_zyx.py (+208 -0)
  44. src/starearth/storage/fast_dfs.py (+462 -0)
  45. src/starearth/storage/mbtiles.py (+299 -0)
  46. src/starearth/storage/mongo.py (+303 -0)
  47. src/starearth/storage/osm_zxy.py (+87 -0)
  48. src/starearth/storage/ptp.py (+242 -0)
  49. src/starearth/storage/smtiles.py (+305 -0)
  50. src/starearth/storage/storage.py (+41 -0)
  51. src/starearth/storage/svtiles.py (+305 -0)
  52. src/starearth/utils/__init__.py (+2 -0)
  53. src/starearth/utils/general_utils.py (+24 -0)
  54. src/starearth/utils/mer_geo.py (+279 -0)
  55. src/starearth/utils/sql_conn.py (+21 -0)
  56. src/starearth/utils/tileset.py (+202 -0)
  57. src/starearth/utils/timeit.py (+65 -0)
  58. src/starearth/utils/utils.py (+149 -0)
  59. src/starearth/xml_to_geojsonl.py (+59 -0)
  60. src/utils/__init__.py (+0 -0)

+ 0 - 0
bin/worker.py


+ 70 - 0
src/app/api/mission.py

@@ -0,0 +1,70 @@
+import json
+import traceback
+
+from flask import request, jsonify
+from flask_restx import Resource, Namespace, reqparse
+
+from app.defines import StatesCode
+from application import Application
+from starearth.utils.general_utils import print_log
+
+ns = Namespace('mission', description='Mission management API')
+
+slice_mission_parser = reqparse.RequestParser(bundle_errors=True)
+slice_mission_parser.add_argument(name='data_path', type=str, location='form', required=False, help='Data directory')
+slice_mission_parser.add_argument(name='tile_size', type=int, location='form', required=False, help='Tile size')
+# tile_grid is read in post() below but was never declared; added here
+slice_mission_parser.add_argument(name='tile_grid', type=str, location='form', required=False, help='Tile grid scheme')
+slice_mission_parser.add_argument(name='tile_format', type=str, location='form', required=False, help='Tile format: png, jpeg or tif')
+slice_mission_parser.add_argument(name='auto_zoom', type=int, location='form', required=False, help='Pick zoom levels automatically, 0: no, 1: yes')
+slice_mission_parser.add_argument(name='min_zoom', type=int, location='form', required=False, help='Minimum zoom level')
+slice_mission_parser.add_argument(name='max_zoom', type=int, location='form', required=False, help='Maximum zoom level')
+
+
+
+@ns.route('/mission_slice_api')
+class MissionAPI(Resource):
+
+    @ns.doc(id='slice', description='Slice imagery into tiles')
+    @ns.expect(slice_mission_parser)
+    def post(self):
+        """Add a mission"""
+        try:
+            form = request.form
+            data_path = form.get('data_path')
+            tile_size = form.get('tile_size')
+            tile_grid = form.get('tile_grid')
+            tile_format = form.get('tile_format')
+            auto_zoom = form.get('auto_zoom')
+            min_zoom = form.get('min_zoom')
+            max_zoom = form.get('max_zoom')
+
+            application = Application()
+            application.new_mission(
+                data_path=data_path,
+                tile_size=tile_size,
+                tile_grid=tile_grid,
+                tile_format=tile_format,
+                auto_zoom=auto_zoom,
+                min_zoom=min_zoom,
+                max_zoom=max_zoom
+
+            )
+
+            # Add to DEM (reporting disabled)
+            # try:
+            #     key_status_upload(proxy_host=config.common.RPOXY_SERVICE_HOST,       # IP of the reporting service
+            #                       proxy_port=str(config.common.RPOXY_SERVICE_PORT),  # port of the reporting service
+            #                       software_key=root_mission_id,                      # mission identifier
+            #                       software_name=mission_name,                        # mission name
+            #                       software_status='0',                               # status as a numeric string
+            #                       software_warn_desc='Mission created successfully', # description
+            #                       is_enable=config.common.ENABLE_PROXY_BASE)         # whether to report
+            # except Exception as err:
+            #     print_log('Mission reporting failed')
+
+            return {"code": StatesCode.SUCCESS, "message": "Mission added successfully"}
+
+        except Exception as err:
+
+            traceback.print_exc()
+
+            return {"code": StatesCode.UNKNOWN_ERROR, "message": str(err)}

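For reference, a minimal sketch of exercising this endpoint with requests once the service is running; the host, port and all form values below are assumptions, and tile_grid must match a TILE_GRID_TYPE value from app.defines:

    import requests

    # Host/port and form values are assumptions; adjust to your deployment.
    resp = requests.post(
        'http://localhost:5000/mission/mission_slice_api',
        data={
            'data_path': '/data/imagery',     # directory containing .tif files
            'tile_size': 256,
            'tile_grid': 'WebMercatorQuad',
            'tile_format': 'png',
            'auto_zoom': 1,
        },
    )
    print(resp.json())  # {'code': ..., 'message': 'Mission added successfully'}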
+ 0 - 0
src/app/mission_jobs/__init__.py


+ 30 - 0
src/app/mission_jobs/image_slice_job.py

@@ -0,0 +1,30 @@
+import os
+
+import rq
+
+from config import Config
+from connection import Connection
+from mission.image_slice import slice_zxy
+
+
+def image_slice_job(data_path, output_path, tile_size, tile_grid, tile_format, auto_zoom, min_zoom, max_zoom):
+
+    config = Config()
+    connection = Connection(config)
+
+    # Enqueue one slicing job per GeoTIFF in the data directory
+    q = rq.Queue(name='default', connection=connection.redis_conn)
+
+    for file_name in os.listdir(data_path):  # list directory entries rather than iterating the path string
+        input_file = os.path.join(data_path, file_name)
+        if os.path.splitext(input_file)[-1] == ".tif":
+            q.enqueue(slice_zxy,
+                      kwargs={"input_file": input_file,
+                              "output_path": output_path,
+                              "tile_size": tile_size,
+                              "tile_grid": tile_grid,
+                              "tile_format": tile_format,
+                              "auto_zoom": auto_zoom,
+                              "min_zoom": min_zoom,
+                              "max_zoom": max_zoom}
+                      )

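These jobs are consumed by an RQ worker; bin/worker.py is empty in this commit, so the following is only a sketch of what such a worker could look like, reusing the project's Config/Connection wiring:

    import rq

    from config import Config
    from connection import Connection

    if __name__ == '__main__':
        # The queue name matches the enqueue side ('default').
        connection = Connection(Config())
        worker = rq.Worker(['default'], connection=connection.redis_conn)
        worker.work()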
+ 236 - 0
src/app/utils/create_geojsonl.py

@@ -0,0 +1,236 @@
+import os
+import traceback
+import rasterio
+import rasterio.features
+import rasterio.warp
+import shapely.geometry
+from geojson import Feature
+from osgeo import osr, gdal
+from starearth.coordinate import EPSG
+from starearth.utils.mer_geo import NewGlobalMercator
+
+
+def create_default_geojsonl(input_file, geojsonl, properties=None):
+    """
+    根据输入的tif数据,生成同名的geojsonl
+    :param input_file: tif文件的绝对路径
+    :param geojsonl: geojsonl文件的绝对路径
+    :param properties: 各种属性
+    :return: None
+    """
+
+    if not properties:
+        properties = {"item_id": "default", "date": "default"}
+
+        pass
+
+    # ds = gdal.Open(new_input_file)
+    # temp = '{"type": "Feature","properties": {}, "geometry": {}}'
+    # [[[],[],[],[],[]]]
+    # gt = ds.GetGeoTransform()
+    # width, height = ds.RasterXSize, ds.RasterYSize
+    # left, top = gt[0], gt[3]
+    # res_x, res_y = gt[1], gt[5]
+    # right, bottom = left + res_x * width, top + res_y * height
+    # Compute the true corner coordinates (algorithm from Li Zhiming). TODO: legacy; irregular images need an explicit geometry (shapefile or geojson), otherwise the result is inaccurate
+    # row_top, row_bottom, column_left, column_right = getRealFourCorners(new_input_file)
+    # print('Corner coordinates: %s,%s,%s,%s' % (row_top, row_bottom, column_left, column_right))
+    # # left, top = row_top
+    # # right, bottom = row_bottom
+    # if epsg != 'EPSG:4326':
+    #     from_srs_id = int(epsg.split(':')[-1])
+    #     row_top = transform_point(point=row_top, from_srs_id=from_srs_id, to_srs_id=4326)
+    #     row_bottom = transform_point(point=row_bottom, from_srs_id=from_srs_id, to_srs_id=4326)
+    #     column_left = transform_point(point=column_left, from_srs_id=from_srs_id, to_srs_id=4326)
+    #     column_right = transform_point(point=column_right, from_srs_id=from_srs_id, to_srs_id=4326)
+    #
+    # coordinates = [
+    #     [list(column_left[:2]), list(row_top[:2]), list(column_right[:2]), list(row_bottom[:2]), list(column_left[:2])]]
+    # temp = """{{"type": "Feature","properties": {"item_id": "default","date": "default"},"geometry": {}}}"""
+
+    # Check for projection info; assign a default of EPSG:4326 when missing
+    ds = gdal.Open(input_file)
+    crs = ds.GetProjection()
+
+    if not crs:
+        default_wkt = EPSG(4326).wkt
+        ds.SetProjection(default_wkt)
+
+    # rasterio cannot open pix data, raises 'libjpeg: Unsupported JPEG data precision 12', and may fail on other inputs
+    # data that rasterio cannot handle is first converted to GeoTIFF and then processed
+    try:
+
+        out_tif_path = ''
+
+        with rasterio.open(input_file, 'r') as dataset:
+            dataset.dataset_mask()
+
+    except Exception as err:
+
+        basepath = os.path.dirname(input_file)
+        basename = os.path.basename(input_file)
+        filename = basename.split('.')[0] + '.tif'
+
+        out_tif_path = os.path.join(basepath, filename)
+        input_file = out_tif_path  # generate the geojsonl from the converted tif
+
+        ds = gdal.Translate(out_tif_path, ds)
+
+        pass
+
+    del ds
+
+    # New algorithm
+    geojsonl_content = """"""
+
+    with rasterio.open(input_file, 'r') as dataset:
+
+        mask = dataset.dataset_mask()
+
+        for polygon, value in rasterio.features.shapes(mask, transform=dataset.transform):
+            # geometry: {'type': 'Polygon', 'coordinates': [[[36.819799, 23.443863],[36.911464, 23.385531],[36.911464, 23.335532],[36.919797, 23.335532],[36.819799, 23.443863]]]}
+            # value: 255 or 0
+            geometry = rasterio.warp.transform_geom \
+                    (
+                    dataset.crs,
+                    "EPSG:4326",
+                    polygon,
+                    precision=6
+                )
+
+            # Simplify the footprint geometry
+            origin_geom = shapely.geometry.shape(geometry)
+            simplified_geom = origin_geom.simplify(0.0001)
+
+            feature = Feature(type="Feature", properties=properties, geometry=simplified_geom)
+
+            geojsonl_content = geojsonl_content + str(feature) + '\n'
+
+    with open(str(geojsonl), 'w') as f:
+
+        f.write(geojsonl_content)
+
+    # Remove the converted tif
+    if out_tif_path:
+        os.remove(out_tif_path)
+
+        pass
+
+    pass
+
+
+def calc_minmax_zoom2(input_file, epsg, image_tile_size):
+    """
+    自动计算切片级别2
+    :param new_input_file: 输入的影像文件
+    :param epsg: 输入影像的投影,形如`EPSG:3857`
+    :return: 最小级别min_z, 最大级别max_z
+    """
+    ds = gdal.Open(input_file)
+    if epsg != 'EPSG:3857':
+        if isinstance(epsg, int):
+            epsg_id = epsg
+        else:  # "EPSG:4326"
+            epsg_id = int(epsg.split(':')[1])
+        from_srs = osr.SpatialReference()
+        from_srs.ImportFromEPSG(epsg_id)
+        to_srs = osr.SpatialReference()
+        to_srs.ImportFromEPSG(3857)
+        ds = gdal.AutoCreateWarpedVRT(ds, from_srs.ExportToWkt(), to_srs.ExportToWkt())
+
+    gt = ds.GetGeoTransform()
+
+    # mercator = NewGlobalMercator(tile_size=IMAGE_TILE_SIZE)
+    mercator = NewGlobalMercator(tile_size=image_tile_size)
+    # Determine the maximum zoom from the pixel resolution
+    max_z = mercator.ZoomForPixelSize(gt[1])
+    # Minimum zoom is the maximum minus 3, i.e. four levels are produced automatically
+    min_z = max_z - 3
+
+    return min_z, max_z
+
+
+def verify_geotiff(input_file, epsg):
+    """
+    @param: new_input_file, 输入文件路径
+    @param: epsg, 参数可能为: None, '', int或不合法的字符串
+    验证geotiff,传入epsg字符串,返回经过验证的epsg字符串或抛出异常,形如"EPSG:4326"
+    1. 当epsg是int值或'EPSG:4326'字符串时,程序先得到epsg_id
+    2. 获取原始文件内置投影信息,
+              如与输入参数不一致报错;相同继续;
+              如原始文件无投影信息,会先添加投影信息落盘,再返回epsg
+    3. 当传入的epsg为None或''时,从原始文件中获取投影,如无投影信息则报错;如获取到投影时则继续;
+    """
+    # Enable GDAL exceptions
+    gdal.UseExceptions()
+
+    try:
+        ds = gdal.Open(input_file, gdal.GA_Update)
+    except Exception as err:
+        ds = gdal.Open(input_file)  # fall back for jpg/png data
+
+    if ds is None:  # happens when a tif has no embedded projection and no companion tfw file
+        raise Exception('The data could not be opened or the given data format is not supported.')
+
+    origin_wkt = ds.GetProjection()
+
+    if epsg:
+
+        if isinstance(epsg, int):
+            epsg_id = epsg
+            epsg = 'EPSG:%s' % epsg_id
+
+        else:  # "EPSG:4326"
+            try:
+                epsg_id = int(epsg.split(':')[1])
+
+            except AttributeError:
+                raise Exception('Malformed epsg string; expected something like EPSG:4326.')
+
+            except ValueError:
+                raise Exception('Malformed epsg string; expected something like EPSG:4326.')
+
+        input_srs = osr.SpatialReference()
+        input_srs.ImportFromEPSG(epsg_id)
+
+        if origin_wkt:
+
+            origin_srs = osr.SpatialReference()
+            origin_srs.ImportFromWkt(origin_wkt)
+
+            if input_srs.ExportToProj4().strip() != origin_srs.ExportToProj4().strip():
+                raise Exception('The given coordinate system does not match the one embedded in the data.')
+
+        else:
+
+            sr = osr.SpatialReference()
+            sr.ImportFromEPSG(epsg_id)
+            wkt = sr.ExportToWkt()
+            ds.SetProjection(wkt)
+            # Persist to disk so the correct projection is available on the next read.
+            ds.FlushCache()
+
+    else:
+
+        if not origin_wkt:
+            raise Exception('Invalid data: no projection information')
+
+        origin_srs = osr.SpatialReference()
+        origin_srs.ImportFromWkt(origin_wkt)
+
+        if origin_srs.IsProjected():
+            epsg_id = origin_srs.GetAuthorityCode(str("PROJCS"))
+
+        else:
+            epsg_id = origin_srs.GetAuthorityCode(str("GEOGCS"))
+
+        if epsg_id:
+            epsg = 'EPSG:%s' % epsg_id
+
+        else:
+            raise Exception('Could not determine the EPSG code')
+
+    # Disable GDAL exceptions
+    gdal.DontUseExceptions()
+
+    return epsg

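A hedged sketch of how these three helpers chain together for one image; the path and properties are hypothetical:

    from app.utils.create_geojsonl import (
        calc_minmax_zoom2, create_default_geojsonl, verify_geotiff)

    input_file = '/data/imagery/scene_20230101.tif'  # hypothetical path

    epsg = verify_geotiff(input_file, None)          # e.g. 'EPSG:4326'
    min_z, max_z = calc_minmax_zoom2(input_file, epsg, 256)
    create_default_geojsonl(input_file,
                            input_file.replace('.tif', '.geojsonl'),
                            {'item_id': 'scene_20230101', 'date': '20230101'})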
+ 30 - 0
src/application.py

@@ -0,0 +1,30 @@
+import rq
+
+from app.mission_jobs.image_slice_job import image_slice_job
+from config import Config
+from connection import Connection
+
+
+class Application:
+
+    # def __init__(self, config):
+    #     self._config = config
+
+    def new_mission(self, data_path, tile_size, tile_grid, tile_format, auto_zoom, min_zoom, max_zoom):
+        # Image slicing
+        config = Config()
+        connection = Connection(config)
+
+        # Enqueue the slicing job
+        q = rq.Queue(name='default', connection=connection.redis_conn)
+        output_path = config.common.OUTPUT_PATH
+        q.enqueue(image_slice_job,
+                  kwargs={"data_path": data_path,
+                          "output_path": output_path,
+                          "tile_size": tile_size,
+                          "tile_grid": tile_grid,
+                          "tile_format": tile_format,
+                          "auto_zoom": auto_zoom,
+                          "min_zoom": min_zoom,
+                          "max_zoom": max_zoom}
+                  )

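A minimal sketch of creating a mission directly from Python; the argument values are assumptions:

    from application import Application

    Application().new_mission(
        data_path='/data/imagery',    # hypothetical directory of .tif files
        tile_size=256,
        tile_grid='WebMercatorQuad',  # must be a TILE_GRID_TYPE value from app.defines
        tile_format='png',
        auto_zoom=1,                  # zoom levels are recomputed when auto_zoom is set
        min_zoom=None,
        max_zoom=None,
    )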
+ 59 - 0
src/connection.py

@@ -0,0 +1,59 @@
+import rq
+
+from redis import Redis
+
+
+class Connection:
+
+    def __init__(self, config):
+        # Redis settings
+        self.redis_host = config.redis.HOST
+        self.redis_port = config.redis.PORT
+        self.redis_db = config.redis.DB
+
+        # Database settings
+        self.db_host = config.database.HOST
+        self.db_port = config.database.PORT
+        self.db_user = config.database.USER
+        self.db_pwd = config.database.PASSWORD
+        self.db_db = config.database.DB
+
+    def __call__(self, *args, **kwargs):
+        redis = Redis \
+                (
+                host=self.redis_host,
+                port=self.redis_port,
+                db=self.redis_db
+            )
+
+        return rq.Connection(redis)
+
+    @staticmethod
+    def current():
+        kwargs = rq.get_current_connection().get_connection_kwargs()
+
+        kwargs["database"] = kwargs["db"]
+
+        return Connection(**kwargs)
+
+    @property
+    def redis_uri(self):
+        """获取redis的uri"""
+
+        return 'redis://{}:{}/{}'.format(self.redis_host, self.redis_port, self.redis_db)
+
+    @property
+    def redis_conn(self):
+        """获取redis的conn"""
+
+        return Redis(host=self.redis_host, port=self.redis_port, db=self.redis_db, health_check_interval=30)
+
+    @property
+    def db_uri(self):
+        """获取postgressql的uri"""
+
+        return "postgresql+psycopg2://{user}:{pwd}@{host}:{port}/{db}".format(user=self.db_user,
+                                                                              pwd=self.db_pwd,
+                                                                              host=self.db_host,
+                                                                              port=self.db_port,
+                                                                              db=self.db_db)

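A short sketch of Connection with a stand-in config object; the real Config class presumably exposes the same attributes:

    from types import SimpleNamespace

    from connection import Connection

    # Stand-in for the project's Config; attribute names follow Connection.__init__.
    config = SimpleNamespace(
        redis=SimpleNamespace(HOST='localhost', PORT=6379, DB=0),
        database=SimpleNamespace(HOST='localhost', PORT=5432,
                                 USER='postgres', PASSWORD='secret', DB='starearth'),
    )

    conn = Connection(config)
    print(conn.redis_uri)  # redis://localhost:6379/0
    print(conn.db_uri)     # postgresql+psycopg2://postgres:secret@localhost:5432/starearth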
+ 0 - 0
src/mission/__init__.py


+ 80 - 0
src/mission/image_slice.py

@@ -0,0 +1,80 @@
+import os
+import shutil
+
+from app.defines import TILE_GRID_TYPE
+from app.utils.create_geojsonl import create_default_geojsonl, calc_minmax_zoom2, verify_geotiff
+from config import Config
+from starearth import FileSystem, Grid
+from starearth.storage.osm_zxy import StorageOSMZXY
+from slice import slice
+
+config = Config()
+
+
+def slice_zxy(
+        input_file,
+        output_path,
+        tile_size,
+        tile_grid,
+        tile_format,
+        auto_zoom,
+        min_zoom,
+        max_zoom,
+        nodata=0,
+        enable_msmt=0,
+        render_type="RGB",
+        channels=[1, 2, 3],
+        merging=3,
+):
+    epsg = verify_geotiff(input_file, None)
+
+    filesystem_cache = FileSystem(config.common.CACHE_PATH)
+
+    tmp_tiles = filesystem_cache.path('tiles')
+
+    sliceTiler_type_dict = {TILE_GRID_TYPE.WebMercatorQuad: Grid.WebMercatorQuad,
+                            TILE_GRID_TYPE.WGS1984Quad: Grid.WGS1984Quad,
+                            TILE_GRID_TYPE.GoogleEarthQuad: Grid.GoogleEarthQuad}
+
+    sliceTiler_type = sliceTiler_type_dict[tile_grid]
+
+    if auto_zoom:
+        min_zoom, max_zoom = calc_minmax_zoom2(input_file, epsg, tile_size)
+
+    geojsonl = os.path.splitext(input_file)[0] + '.geojsonl'
+
+    if not os.path.exists(str(geojsonl)):
+        create_default_geojsonl(input_file, geojsonl, {"date": os.path.splitext(os.path.basename(input_file))[0]})
+
+    slice \
+            (
+            input_file=input_file,
+            tile_grid=sliceTiler_type,
+            tile_size=tile_size,
+            tmp_tiles=tmp_tiles,
+            epsg=epsg,
+            nodata=nodata,
+            tile_format=tile_format,
+            min_zoom=min_zoom,
+            max_zoom=max_zoom,
+            enable_msmt=enable_msmt,
+            channels=channels,
+            render_type=render_type,
+            lut=None,
+        )
+
+    base_name = os.path.splitext(os.path.basename(input_file))[0]
+
+    storage_osmzxy_obj = StorageOSMZXY(
+        tiles_path=os.path.join(tmp_tiles, base_name),
+        min_zoom=min_zoom,
+        max_zoom=max_zoom,
+        merging=merging,
+        output_path=output_path,
+        tile_format=tile_format
+    )
+    storage_osmzxy_obj.storage()
+
+    # Remove the temporary directory
+    if os.path.exists(os.path.join(tmp_tiles, base_name)):
+        shutil.rmtree(os.path.join(tmp_tiles, base_name))

+ 239 - 0
src/slice.py

@@ -0,0 +1,239 @@
+import json
+import os
+import piexif
+
+from starearth import Filename, Scene, GridFactory
+from starearth import Nodata, Slicer
+from starearth.coordinate import EPSG, Coordinate
+from starearth.output.raw import OutputRAW
+from starearth.output.png import OutputPNG
+from starearth.output.jpeg import OutputJPEG
+from starearth.renderer.greyscale import RendererGreyscale
+from starearth.renderer.rgb import RendererRGB
+from starearth.sheet import Sheet
+from starearth.utils.general_utils import print_log
+
+
+def get_size(path):
+    total_size = 0
+    for dirpath, dirnames, filenames in os.walk(path):
+        for f in filenames:
+            fp = os.path.join(dirpath, f)
+            # skip if it is symbolic link
+            if not os.path.islink(fp):
+                total_size += os.path.getsize(fp)
+
+    return total_size
+
+
+class Accessory:
+
+    def __init__(self):
+        pass
+
+    def structure_exif_dict(self, properties):
+        """构造一个exif_dict字典,这样不用写多条exif信息"""
+
+        date = properties.get('date', '')
+        date_time = properties.get('date_time', '')
+
+        eoc = properties.get('eoc', '')
+        eop = properties.get('eop', '')
+        eoi = properties.get('eoi', '')
+        res = properties.get('res', '')
+        eopd = properties.get('eopd', '')
+        eopc = properties.get('eopc', '')
+        user = properties.get('user', '')
+        disp = properties.get('disp', 0)
+        bk1 = properties.get('bk1', '')
+        bk2 = properties.get('bk2', '')
+        bk3 = properties.get('bk3', '')
+
+        exif_dict = {'Exif': {}, '0th': {}, 'Interop': {}, '1st': {}, 'thumbnail': None, 'GPS': {}}
+
+        _time = date or date_time or 'default'
+
+        description = '%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s' % (eoc, eop, eoi, res, eopd, eopc, user, bk1, bk2, bk3, disp)
+
+        zeroth_ifd = {
+            piexif.ImageIFD.ImageDescription: description.encode('unicode_escape').decode('ascii'),
+            piexif.ImageIFD.DateTime: date_time,
+        }
+
+        add_exif_dict = {"0th": zeroth_ifd}  # 使用0th这个key
+        exif_dict = dict(exif_dict, **add_exif_dict)  # 将构造的exif_dict添加到照片原有的exif_dict中
+
+        return piexif.dump(exif_dict), _time  # 返回最终的exif_dicttile_content
+
+
+def get_coordinate(coordinates, all_coordinates):
+    for coordinate in coordinates:
+
+        if not isinstance(coordinate[0], list):
+            all_coordinates.append(coordinate)
+            pass
+
+        else:
+            get_coordinate(coordinate, all_coordinates)
+        pass
+
+    pass
+
+
+def calc_bbox(coordinates):
+    longitude_max = -180
+    longitude_min = 180
+    latitude_max = -90
+    latitude_min = 90
+
+    # Collect every individual coordinate pair
+    all_coordinate = []
+    get_coordinate(coordinates, all_coordinate)
+
+    # Compute the bbox extent
+    for coordinate in all_coordinate:
+        longitude_max = max(longitude_max, coordinate[0])
+        longitude_min = min(longitude_min, coordinate[0])
+
+        latitude_max = max(latitude_max, coordinate[1])
+        latitude_min = min(latitude_min, coordinate[1])
+
+    bbox = [longitude_min, latitude_min, longitude_max, latitude_max]
+
+    return bbox
+
+
+def slice \
+                (
+                input_file, tile_grid, tile_size, tmp_tiles, epsg=None,
+                nodata=None, tile_format='png', min_zoom=None, max_zoom=None,
+                enable_msmt=0, channels=None, lut=None, render_type=None
+        ):
+    sliceTiler = GridFactory.create(tile_grid)
+
+    # 格式化
+    tile_format = tile_format.strip().lower()
+    if render_type:
+        render_type = render_type.strip().upper()
+
+    # new_input_file = filesystem.resolve(input_file)
+
+    filename = Filename(input_file)
+    base_name = os.path.splitext(input_file)[0]
+    geojsonl = base_name + '.geojsonl'
+
+    scene = Scene()
+
+    scene.load(filename)
+
+    coordinate = None
+
+    if sliceTiler.identifier == 'WGS1984Quad':
+
+        pass
+
+    elif sliceTiler.identifier == 'WebMercatorQuad':
+
+        coordinate = Coordinate.EPSG_3857
+
+    elif sliceTiler.identifier == 'GoogleEarthQuad':
+
+        coordinate = Coordinate.EPSG_4326
+
+    # Whether output tiles carry temporal (EXIF) metadata
+    msmt_accessory = Accessory() if enable_msmt else None
+
+    # min_zoom, max_zoom = 12, 13  # for testing: set a zoom range manually
+    print_log('min-max:%s-%s' % (min_zoom, max_zoom))
+
+    print_log('Slicing started...')
+    # start_time = time.time()
+
+    # Choose a renderer
+    if render_type == 'RGB':
+        # color rendering
+        renderer = RendererRGB(scene.datatype, channels, nodata)
+
+    elif render_type == 'GREYSCALE':
+        # greyscale rendering
+        renderer = RendererGreyscale(scene.datatype, lut, nodata)
+
+    else:
+        renderer = None
+
+    if tile_format == 'png':
+
+        output = OutputPNG \
+                (
+                tmp_tiles,
+                msmt_accessory,
+                renderer=renderer
+            )
+
+        pass
+
+    elif tile_format == 'jpeg':
+
+        output = OutputJPEG \
+                (
+                tmp_tiles,
+                msmt_accessory,
+                renderer=renderer
+            )
+
+    else:
+
+        output = OutputRAW \
+                (
+                tmp_tiles,
+                msmt_accessory,
+                nodata=Nodata([nodata for _ in range(scene.bands)]),
+            )
+
+    square = tile_size
+
+    slicer = Slicer(scene, square, output, sliceTiler)
+
+    sheets = None
+
+    if filename.geojsonl:
+        sheets = Sheet.parse_geojsonl(filename.geojsonl)
+
+    if isinstance(epsg, str):
+        epsg_id = int(epsg.split(':')[-1])
+
+    else:
+        epsg_id = epsg
+
+    slicer.slice(min_zoom, max_zoom, epsg_id, sheets, coordinate=coordinate)
+    # slicer._slice_sp(min_zoom, max_zoom, epsg_id, sheets)
+
+    # Write the tile list to a file
+    with open(geojsonl, 'r') as f:
+        new_feature = json.loads(f.readline())  # geojsonl is line-delimited JSON; parse the first feature
+    date = new_feature['properties']['date']
+    tiles = sliceTiler.tiles([new_feature], min_zoom, max_zoom)
+
+    tiles_list_json = tmp_tiles.path(date).resolve('tiles_list.json')
+
+    exist_tiles_list_json = []
+    if os.path.exists(tiles_list_json):
+        with open(tiles_list_json, 'r', encoding='utf-8') as fp:
+            exist_tiles_list_json = json.load(fp)
+
+    with open(tiles_list_json, 'w') as fp:
+        if exist_tiles_list_json:
+            for tile in exist_tiles_list_json:
+                if tile not in tiles:
+                    tiles.append(tile)
+
+        json.dump(tiles, fp)
+
+    tiles_nums = 0
+    tiles_nums += len(tiles)
+
+    print_log('Slicing finished.')
+    # end_time = time.time()
+
+    # print_log('Number of tiles: {}'.format(tiles_nums))
+    # print_log("Slicing took: {}s".format(end_time - start_time))

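A quick sketch of calc_bbox on nested polygon coordinates; the values are illustrative:

    from slice import calc_bbox

    coordinates = [[[36.8198, 23.4439], [36.9115, 23.3855],
                    [36.9115, 23.3355], [36.8198, 23.4439]]]

    print(calc_bbox(coordinates))
    # [36.8198, 23.3355, 36.9115, 23.4439] -> [west, south, east, north]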
+ 34 - 0
src/starearth/__init__.py

@@ -0,0 +1,34 @@
+from .bound                         import Bound
+from .colormap                      import ColorMap
+from .coordinate                    import Coordinate
+from .datatype                      import DataType
+from .filename                      import Filename
+from .filesystem.filesystem         import FileSystem
+from .filesystem.cache              import FileSystemCache
+from .filesystem                    import FileSystemFactory
+from .grid                          import Grid, GridFactory
+from .nodata                        import Nodata
+from .scene                         import Scene
+from .slicer                        import Slicer
+from .renderer.rgb                  import RendererRGB
+from .renderer.greyscale            import RendererGreyscale
+
+
+__all__ = \
+    [
+        'Bound',
+        'ColorMap',
+        'Coordinate',
+        'DataType',
+        'Filename',
+        'FileSystem',
+        'FileSystemCache',
+        'FileSystemFactory',
+        'Grid',
+        'GridFactory',
+        'Nodata',
+        'Scene',
+        'Slicer',
+        'RendererRGB',
+        'RendererGreyscale',
+    ]

+ 188 - 0
src/starearth/bound.py

@@ -0,0 +1,188 @@
+
+# A simple 2D vector (Vector2)
+
+class V2:
+
+    def __init__(self, x, y):
+
+        self.x = x
+        self.y = y
+
+        pass
+
+    pass
+
+
+# A simple 1D range intersection helper
+
+class R2:
+
+    def __init__(self, minimum, maximum):
+
+        self._data = (minimum, maximum)
+
+        pass
+
+    @property
+    def min(self): return self._data[0]
+
+    @property
+    def max(self): return self._data[1]
+
+    @property
+    def length(self): return self._data[1] - self._data[0]
+
+    def intersect(self, other):
+
+        if self.min > other.max or self.max < other.min:
+
+            return None
+
+        minimum = self.min if self.min > other.min else other.min
+        maximum = self.max if self.max < other.max else other.max
+
+        return R2(minimum, maximum)
+
+    pass
+
+
+class Bound:
+
+    class CartesianLH:
+
+        @staticmethod
+        def top   (r): return r.min
+
+        @staticmethod
+        def bottom(r): return r.max
+
+        pass
+
+    class CartesianRH:
+
+        @staticmethod
+        def top   (r): return r.max
+
+        @staticmethod
+        def bottom(r): return r.min
+
+        pass
+
+    def __init__(self, x_min, x_max, y_min, y_max, cartesian):
+
+        self._rx = R2(x_min, x_max)
+        self._ry = R2(y_min, y_max)
+
+        self._cartesian = cartesian
+
+        pass
+
+    @property
+    def left  (self): return self._rx.min
+
+    @property
+    def right (self): return self._rx.max
+
+    @property
+    def top   (self): return self._cartesian.top   (self._ry)
+
+    @property
+    def bottom(self): return self._cartesian.bottom(self._ry)
+
+    @property
+    def west (self): return self.left
+
+    @property
+    def east (self): return self.right
+
+    @property
+    def north(self): return self.top
+
+    @property
+    def south(self): return self.bottom
+
+    @property
+    def x(self): return self._rx.min
+
+    @property
+    def y(self): return self._ry.min
+
+    @property
+    def width (self): return self._rx.length
+
+    @property
+    def height(self): return self._ry.length
+
+    @property
+    def value(self):
+
+        return self.west, self.south, self.east, self.north
+
+    @property
+    def coordinates(self):
+
+        return \
+            [
+                [self.west, self.south],
+                [self.east, self.south],
+                [self.east, self.north],
+                [self.west, self.north],
+                [self.west, self.south]
+            ]
+
+    def intersect(self, other):
+
+        other: Bound = other
+
+        ix = self._rx.intersect(other._rx)
+        iy = self._ry.intersect(other._ry)
+
+        if ix is None or iy is None:
+
+            return None
+
+        return Bound(ix.min, ix.max, iy.min, iy.max, self._cartesian)
+
+    @staticmethod
+    def LH(left, top, right, bottom):
+
+        return Bound(left, right, top, bottom, Bound.CartesianLH)
+
+    @staticmethod
+    def RH(left, bottom, right, top):
+
+        return Bound(left, right, bottom, top, Bound.CartesianRH)
+
+    @staticmethod
+    def LH_XYWH(x, y, width, height):
+
+        return Bound.LH(x, y, x + width, y + height)
+
+    @staticmethod
+    def RH_XYWH(x, y, width, height):
+
+        return Bound.RH(x, y, x + width, y + height)
+
+    @staticmethod
+    def from_transform(width: int, height: int, transform):
+
+        """
+        使用从 GDAL 中获取的信息构造边界对象
+
+        :param width:     图像宽度
+        :param height:    图像高度
+        :param transform: 空间变换数据
+        :return:          边界对象
+        """
+
+        rx = transform[1]
+        ry = transform[5]
+
+        west  = transform[0]
+        north = transform[3]
+        east  = west  + width  * rx
+        south = north + height * ry
+
+        return Bound.RH(west, south, east, north)
+
+    pass

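A short sketch of Bound.from_transform and intersection; the geotransform values are illustrative:

    from starearth.bound import Bound

    # GDAL-style geotransform: (west, x_res, 0, north, 0, -y_res)
    transform = (100.0, 0.1, 0.0, 40.0, 0.0, -0.1)

    a = Bound.from_transform(width=100, height=100, transform=transform)  # 100..110 E, 30..40 N
    b = Bound.RH(105.0, 35.0, 115.0, 45.0)

    print(a.intersect(b).value)  # (105.0, 35.0, 110.0, 40.0) -> (west, south, east, north)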
+ 87 - 0
src/starearth/cache.py

@@ -0,0 +1,87 @@
+import os
+from PIL import Image
+
+
+class CacheFile:
+
+    def __init__(self, storage, formatter):
+
+        self._data = {}
+
+        self._storage   = storage
+        self._formatter = formatter
+
+        pass
+
+    def put(self, x, y, z, image):
+
+        pass
+
+    def take(self, x, y, z, blank, date=None):
+
+        extension = 'jpeg'
+
+        fn_jpeg = self._formatter.format(x, y, z, extension)
+
+        if date:
+
+            c_filename_dict = self._storage.path(date).find_ox_oy(fn_jpeg)
+
+            filename = self._storage.path(date).resolve(fn_jpeg)
+
+        else:
+            c_filename_dict = self._storage.find_ox_oy(fn_jpeg)
+
+            filename = self._storage.resolve(fn_jpeg)
+
+        if c_filename_dict:
+            return Image.open(c_filename_dict['filename']), {'extension': extension, 'ox': c_filename_dict['ox'], 'oy': c_filename_dict['oy']}
+
+        if os.path.exists(filename):
+            return Image.open(filename), {'extension': extension, 'ox': 0, 'oy': 0}
+
+        extension = 'png'
+
+        fn_png = self._formatter.format(x, y, z, extension)
+
+        if date:
+
+            c_filename_dict = self._storage.path(date).find_ox_oy(fn_png)
+
+            filename = self._storage.path(date).resolve(fn_png)
+
+        else:
+            c_filename_dict = self._storage.find_ox_oy(fn_png)
+
+            filename = self._storage.resolve(fn_png)
+
+        if c_filename_dict:
+            return Image.open(c_filename_dict['filename']), {'extension': extension, 'ox': c_filename_dict['ox'], 'oy': c_filename_dict['oy']}
+
+        if os.path.exists(filename):
+            return Image.open(filename).convert('RGB'), {'extension': extension, 'ox': 0, 'oy': 0}
+
+        return blank, {'ox': 0, 'oy': 0}
+
+    pass
+
+
+class CacheMemory:
+
+    def __init__(self, dictionary):
+
+        self._dictionary = dictionary
+
+        pass
+
+    def put(self, x, y, z, image):
+
+        self._dictionary[(x, y, z)] = image
+
+        pass
+
+    def take(self, x, y, z, blank, date=None):
+
+        return self._dictionary.pop((x, y, z), blank), None
+
+    pass

+ 40 - 0
src/starearth/cog_convert.py

@@ -0,0 +1,40 @@
+import os
+import shutil
+import subprocess
+
+
+def cog_convert(input_path, output_path, txt_enable):
+
+    for root, dirs, files in os.walk(input_path):
+
+        for tif_name in files:
+
+            name, ext = os.path.splitext(tif_name)
+            ext = ext.strip('.').lower()
+            if ext not in ['tif', 'tiff', ]:
+                continue
+
+            relative_path = root[len(input_path):]
+
+            # Use a separate variable: reassigning output_path would compound paths across os.walk iterations
+            dst_dir = os.path.join(output_path, relative_path.strip('/'))
+
+            if not os.path.exists(dst_dir):
+                os.makedirs(dst_dir, exist_ok=True)
+
+            src_tif_path = os.path.join(root, tif_name)
+            dst_tif_path = os.path.join(dst_dir, tif_name)
+
+            # If gdal_translate is not on the PATH, give its full path explicitly
+            # cmd = "/usr/softwares/GDAL/bin/gdal_translate {} {} -of COG -co TILING_SCHEME=GoogleMapsCompatible -co COMPRESS=LZW".format(src_tif_path, dst_tif_path)
+
+            cmd = "gdal_translate {} {} -of COG -co TILING_SCHEME=GoogleMapsCompatible -co COMPRESS=LZW".format(src_tif_path, dst_tif_path)
+
+            p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+            p.wait()
+
+            src_txt_path = os.path.join(root, name + '.txt')
+
+            # Copy the companion txt file only when requested and when it exists
+            if txt_enable and os.path.exists(src_txt_path):
+                shutil.copy(src_txt_path, dst_dir)

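A hedged usage sketch; the paths are hypothetical:

    from starearth.cog_convert import cog_convert

    # Convert every GeoTIFF under /data/src into a Cloud Optimized GeoTIFF under
    # /data/cog, copying companion .txt files along the way.
    cog_convert('/data/src', '/data/cog', txt_enable=True)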
File diff suppressed because it is too large
+ 127 - 0
src/starearth/colormap.py


+ 97 - 0
src/starearth/coordinate.py

@@ -0,0 +1,97 @@
+from osgeo import gdal, osr
+
+
+class Coordinate:
+
+    EPSG_3857 = None
+    EPSG_4326 = None
+
+    def __init__(self, name: str, code: int):
+
+        sr = osr.SpatialReference()
+
+        sr.ImportFromEPSG(code)
+
+        wkt = sr.ExportToWkt()
+
+        self._name = name
+        self._code = code
+
+        self._wkt = wkt
+
+        pass
+
+    def __eq__(self, other):
+
+        return self._name == other.name and self._code == other.code
+
+    def __ne__(self, other):
+
+        return self._name != other.name or self._code != other.code
+
+    @property
+    def name(self) -> str:
+
+        return self._name
+
+    @property
+    def code(self) -> int:
+
+        return self._code
+
+    @property
+    def wkt(self) -> str:
+
+        return self._wkt
+
+    @staticmethod
+    def warp(dataset, cs):
+
+        return gdal.AutoCreateWarpedVRT \
+            (
+                dataset,
+                dataset.GetProjection(),
+                cs.wkt
+            )
+
+    @staticmethod
+    def from_wkt(wkt):
+
+        if wkt == "":
+
+            return Coordinate("", 0)
+            # return Coordinate("EPSG", 4326)
+
+        sr = osr.SpatialReference()
+
+        sr.ImportFromWkt(wkt)
+
+        if sr.IsProjected():
+
+            name = sr.GetAuthorityName("PROJCS")
+            code = sr.GetAuthorityCode("PROJCS")
+
+            # gcs = sr.CloneGeogCS()
+
+            pass
+
+        else:
+
+            name = sr.GetAuthorityName("GEOGCS")
+            code = sr.GetAuthorityCode("GEOGCS")
+
+            pass
+
+        return Coordinate(name, int(code))
+
+    pass
+
+
+def EPSG(code):
+
+    return Coordinate("EPSG", code)
+
+
+Coordinate.EPSG_3857 = EPSG(3857)
+Coordinate.EPSG_4326 = EPSG(4326)
+

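A small sketch of the Coordinate helpers:

    from starearth.coordinate import EPSG, Coordinate

    wgs84 = EPSG(4326)
    print(wgs84.name, wgs84.code)  # EPSG 4326

    # Round-trip through WKT
    print(Coordinate.from_wkt(wgs84.wkt) == Coordinate.EPSG_4326)  # True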
+ 106 - 0
src/starearth/datatype.py

@@ -0,0 +1,106 @@
+import numpy      as np
+import osgeo.gdal as gdal
+
+
+class DataType:
+
+    def __init__(self, name, bits, min_max=None):
+
+        self._name = name
+        self._bits = bits
+
+        self._range = min_max
+
+        pass
+
+    def __str__(self):
+
+        return self._name
+
+    @property
+    def name(self):
+
+        return self._name
+
+    @property
+    def bits(self):
+
+        return self._bits
+
+    @property
+    def range(self):
+
+        return self._range
+
+    def to_gdal(self):
+
+        return DataType._to_gdal[self._name]
+
+    def to_numpy(self):
+
+        return DataType._to_numpy[self._name]
+
+    @staticmethod
+    def from_gdal(value, min_max=None):
+
+        name, bits = DataType._from_gdal.get \
+            (
+                value,
+                ('unsupported type: {}'.format(value), 0)
+            )
+
+        return DataType(name, bits, min_max)
+
+    @staticmethod
+    def from_numpy(value, min_max=None):
+
+        name, bits = DataType._from_numpy.get \
+            (
+                value.name,
+                ('unsupported type: {}'.format(value.name), 0)
+            )
+
+        return DataType(name, bits, min_max)
+
+    _from_gdal = \
+        {
+            gdal.GDT_Byte:    ('uint8',    8),
+            gdal.GDT_UInt16:  ('uint16',  16),
+            gdal.GDT_UInt32:  ('uint32',  32),
+            gdal.GDT_Int16:   ('int16',   16),
+            gdal.GDT_Int32:   ('int32',   32),
+            gdal.GDT_Float32: ('float32', 32)
+        }
+
+    _from_numpy = \
+        {
+            'uint8':   ('uint8',    8),
+            'uint16':  ('uint16',  16),
+            'uint32':  ('uint32',  32),
+            'int16':   ('int16',   16),
+            'int32':   ('int32',   32),
+            'float32': ('float32', 32)
+        }
+
+    _to_gdal = \
+        {
+            'uint8':   gdal.GDT_Byte,
+            'uint16':  gdal.GDT_UInt16,
+            'uint32':  gdal.GDT_UInt32,
+            'int16':   gdal.GDT_Int16,
+            'int32':   gdal.GDT_Int32,
+            'float32': gdal.GDT_Float32
+        }
+
+    _to_numpy = \
+        {
+            'uint8':   np.uint8,
+            'uint16':  np.uint16,
+            'uint32':  np.uint32,
+            'int16':   np.int16,
+            'int32':   np.int32,
+            'float32': np.float32
+        }
+
+    pass
+

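A short round-trip sketch for DataType:

    import numpy as np

    from starearth.datatype import DataType

    dt = DataType.from_numpy(np.dtype('uint16'))
    print(dt.name, dt.bits)  # uint16 16
    print(dt.to_gdal())      # the integer constant gdal.GDT_UInt16
    print(dt.to_numpy())     # <class 'numpy.uint16'>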
+ 50 - 0
src/starearth/filename.py

@@ -0,0 +1,50 @@
+import os
+
+
+class Filename:
+
+    def __init__(self, path):
+
+        if not os.path.exists(path):
+
+            raise Exception(f"{path} not exist.")
+
+        if not os.path.isfile(path):
+
+            raise Exception(f"{path} is not a file.")
+
+        directory, target = os.path.split(path)
+
+        name, extension = os.path.splitext(target)
+
+        geojsonl = os.path.join(directory, f"{name}.geojsonl")
+
+        self._directory = directory
+        self._name = name
+
+        self._value = os.path.join(directory, f"{target}")
+
+        self._geojsonl = geojsonl if os.path.exists(geojsonl) else None
+
+        pass
+
+    def __str__(self):
+
+        return self._value
+
+    @property
+    def directory(self):
+
+        return self._directory
+
+    @property
+    def name(self):
+
+        return self._name
+
+    @property
+    def geojsonl(self):
+
+        return self._geojsonl
+
+    pass

+ 50 - 0
src/starearth/filesystem/__init__.py

@@ -0,0 +1,50 @@
+from starearth.filesystem.filesystem import FileSystem
+from starearth.filesystem.cache import FileSystemCache
+
+
+class FileSystemFactory:
+
+    def __init__(self):
+        self._creators = {
+            'FileSystem': FileSystem,
+            'FileSystemCache': FileSystemCache,
+        }
+
+    _instance = None
+
+    @staticmethod
+    def _singleton():
+        if FileSystemFactory._instance is None:
+            FileSystemFactory._instance = FileSystemFactory()
+
+            pass
+
+        return FileSystemFactory._instance
+
+    @staticmethod
+    def create(name, *args):
+
+        instance = FileSystemFactory._singleton()
+
+        if name not in instance._creators.keys():
+            raise Exception('{} is invalid '.format(name))
+
+        creator = instance._creators[name]
+
+        if creator is None:
+            # return None # 或者 raise 'unsupported object type'
+
+            raise Exception('unsupported object type')
+
+        return creator(*args)
+
+    @staticmethod
+    def register(name, creator):
+
+        instance = FileSystemFactory._singleton()
+
+        instance._creators[name] = creator
+
+        pass
+
+    pass

+ 69 - 0
src/starearth/filesystem/cache.py

@@ -0,0 +1,69 @@
+import os
+import shutil
+
+
+class FileSystemCache:
+    # TODO yue.zijian & jin.qingchun
+    def __init__(self, tmp_geotiff='./', tmp_tiles='./', tmp_ptp='./'):
+        self.tmp_geotiff = self._ensure(tmp_geotiff)
+        self.tmp_tiles = self._ensure(tmp_tiles)
+        self.tmp_ptp = self._ensure(tmp_ptp)
+        self.file_list = []
+        pass
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        # # Remove the cached tile directory
+        # shutil.rmtree(self.tmp_tiles)
+        # os.makedirs(self.tmp_tiles, exist_ok=True)
+        #
+        # # os.system('rm -rf %s' % self.tmp_tiles)
+        # # os.system('mkdir %s' % self.tmp_tiles)
+        #
+        # for each_path in self.file_list:
+        #     # Remove cached source files (*.tif/*.ovr/*.tfw/*.geojsonl)
+        #     if os.path.exists(each_path):
+        #         os.remove(each_path)
+        #
+        #     # os.system('rm -rf %s' % each_path)
+        #
+        # # Remove the cached temporary ptp
+        # shutil.rmtree(self.tmp_ptp)
+        # os.makedirs(self.tmp_ptp, exist_ok=True)
+        #
+        # # os.system('rm -rf %s/*' % self.tmp_ptp)
+        pass
+
+    def resolve(self, path):
+        """下载影像和元数据到/cache缓存目录"""
+        file_path = os.path.join(self.tmp_geotiff, os.path.basename(path))
+        if os.path.exists(path):
+            shutil.copy(path, file_path)
+            self.file_list.append(file_path)
+
+        tif_ovr = path + '.ovr'
+        new_tif_ovr = os.path.join(self.tmp_geotiff, os.path.basename(tif_ovr))
+        if os.path.exists(tif_ovr):
+            shutil.copy(tif_ovr, new_tif_ovr)
+            self.file_list.append(new_tif_ovr)
+
+        tif_tfw = os.path.splitext(path)[0] + '.tfw'
+        new_tif_tfw = os.path.join(self.tmp_geotiff, os.path.basename(tif_tfw))
+        if os.path.exists(tif_tfw):
+            shutil.copy(tif_tfw, new_tif_tfw)
+            self.file_list.append(new_tif_tfw)
+        return file_path
+
+    def _ensure(self, path):
+
+        if not os.path.exists(path):
+            
+            os.makedirs(path, exist_ok=True)
+
+            pass
+
+        return str(path)
+
+    pass

+ 52 - 0
src/starearth/filesystem/filesystem.py

@@ -0,0 +1,52 @@
+import glob
+import os
+
+
+class FileSystem:
+
+    def __init__(self, root='./'):
+
+        self._root = self._ensure(root)
+
+        self._paths = {}
+
+        pass
+
+    def __str__(self):
+
+        return self._root
+
+    def path(self, name):
+
+        if name not in self._paths:
+
+            path = self._ensure(os.path.join(self._root, name.strip('/')))
+
+            self._paths[name] = FileSystem(path)
+
+            pass
+
+        return self._paths[name]
+
+    def resolve(self, name):
+
+        return os.path.join(self._root, name.strip('/'))
+
+    def list(self, extension='*'):
+
+        files = \
+            [
+                os.path.basename(f) for f in glob.glob(os.path.join(self._root, '*.{}'.format(extension)))
+            ]
+
+        return sorted(files)
+
+    def _ensure(self, path):
+
+        if not os.path.exists(path):
+
+            os.makedirs(path, exist_ok=True)
+
+            pass
+
+        return str(path)

+ 35 - 0
src/starearth/grid/__init__.py

@@ -0,0 +1,35 @@
+from .grid import Grid
+
+from .ge  import GridGoogleEarth
+from .mct import GridMorecantile
+from .sm  import GridSupermercado
+
+
+class GridFactory:
+
+    @staticmethod
+    def create(identifier):
+
+        if identifier == Grid.GoogleEarthQuad:
+
+            return GridGoogleEarth(Grid.GoogleEarthQuad)
+
+        if identifier == Grid.WebMercatorQuad:
+
+            return GridSupermercado(Grid.WebMercatorQuad)
+
+        if identifier == Grid.WGS1984Quad:
+
+            return GridMorecantile(Grid.WGS1984Quad)
+
+        return None
+
+    pass
+
+
+__all__ = \
+    [
+        'Grid',
+        'GridFactory'
+    ]
+

+ 109 - 0
src/starearth/grid/ge.py

@@ -0,0 +1,109 @@
+from .grid import Grid
+
+import numpy as np
+
+import rasterio.features
+import rasterio.transform
+
+from starearth import Bound
+
+from supermercado import super_utils  # temporarily borrow this helper
+
+
+class GridGoogleEarth(Grid):
+
+    def __init__(self, identifier):
+
+        super().__init__(identifier)
+
+        pass
+
+    def tiles(self, features, z_min, z_max=None):
+
+        bound = Grid.bound_by(features)
+
+        polygons = [f for f in super_utils.filter_features(features)]
+
+        zooms = [z_min] if z_max is None else [z for z in range(z_min, z_max + 1)]
+
+        tiles = []
+
+        for z in zooms:
+
+            minimum = self.tile_of(bound[0], bound[3], z)
+            maximum = self.tile_of(bound[2], bound[1], z)
+
+            tile_range = \
+                {
+                    "x": { "min": minimum[0], "max": maximum[0] + 1 },
+                    "y": { "min": minimum[1], "max": maximum[1] + 1 }
+                }
+
+            out_shape = \
+                [
+                    tile_range["y"]["max"] - tile_range["y"]["min"],
+                    tile_range["x"]["max"] - tile_range["x"]["min"]
+                ]
+
+            transform = rasterio.transform.from_bounds \
+                (
+                    bound[0],
+                    bound[1],
+                    bound[2],
+                    bound[3],
+                    out_shape[1],  # width = columns
+                    out_shape[0]   # height = rows
+                )
+
+            shapes = [(geom["geometry"], 255) for geom in polygons]
+
+            burn = rasterio.features.rasterize \
+                (
+                    shapes=shapes,
+                    out_shape=out_shape,
+                    transform=transform,
+                    all_touched=True,
+                )
+
+            xys = np.fliplr(np.dstack(np.where(burn))[0])
+
+            xys[:, 0] += tile_range["x"]["min"]
+            xys[:, 1] += tile_range["y"]["min"]
+
+            tiles.extend(np.append(xys, np.zeros((xys.shape[0], 1), dtype=np.uint8) + z, axis=1).tolist())
+
+            pass
+
+        return tiles
+
+    def bound_of(self, x, y, z):
+
+        # Convert tile x/y/z to a coordinate range
+        # each tile spans the same angle in longitude and latitude
+
+        angle_per_tile = 360.0 / float(2 ** z)
+
+        w = x * angle_per_tile - 180.0
+        n = (2 ** z - y) * angle_per_tile - 180.0
+        e = w + angle_per_tile
+        s = n - angle_per_tile
+
+        return Bound.RH(w, s, e, n)
+
+    def tile_of(self, longitude, latitude, zoom):
+
+        # Convert longitude/latitude to tile x/y
+
+        b = longitude + 180.0
+        l = latitude + 180.0
+
+        angle_per_tile = 360.0 / float(2 ** zoom)  # angular extent of each tile in degrees
+
+        x = int(b / angle_per_tile)
+        y = int(l / angle_per_tile)
+        y = (2 ** zoom - 1) - y
+
+        return x, y
+
+    pass
+

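A worked sketch of the GoogleEarthQuad tile math (a single 360-degree square at z=0, split four ways per level); the coordinates are illustrative:

    from starearth.grid.ge import GridGoogleEarth
    from starearth.grid.grid import Grid

    grid = GridGoogleEarth(Grid.GoogleEarthQuad)

    # At z=2 each tile spans 360/4 = 90 degrees on each axis.
    x, y = grid.tile_of(116.4, 39.9, 2)
    print(x, y)  # 3 1

    print(grid.bound_of(3, 1, 2).value)  # (90.0, 0.0, 180.0, 90.0) -> (west, south, east, north)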
+ 59 - 0
src/starearth/grid/grid.py

@@ -0,0 +1,59 @@
+from morecantile.scripts.cli import coords
+
+
+class Grid:
+
+    GoogleEarthQuad = "GoogleEarthQuad"
+    WebMercatorQuad = "WebMercatorQuad"
+    WGS1984Quad     = "WGS1984Quad"
+
+    def __init__(self, identifier):
+
+        self._identifier = identifier
+
+        pass
+
+    @property
+    def identifier(self):
+
+        return self._identifier
+
+    def tiles(self, features, z_min, z_max=None):
+
+        return []
+
+    def bound_of(self, x, y, z):
+
+        return None
+
+    def tile_of(self, longitude, latitude, zoom):
+
+        return None
+
+    @staticmethod
+    def bound_by(features) -> tuple[float, float, float, float]:
+
+        bound = None
+
+        for feature in features:
+
+            xs = []
+            ys = []
+
+            for data in feature.get("features", [feature]):
+
+                lngs, lats = zip(*list(coords(data)))
+
+                xs.extend([min(lngs), max(lngs)])
+                ys.extend([min(lats), max(lats)])
+
+                pass
+
+            bound = min(xs), min(ys), max(xs), max(ys)
+
+            pass
+
+        return bound
+
+    pass
+

+ 97 - 0
src/starearth/grid/mct.py

@@ -0,0 +1,97 @@
+from .grid import Grid
+
+import morecantile as mct
+
+import numpy as np
+
+import rasterio.features
+import rasterio.transform
+
+from starearth import Bound
+
+from supermercado import super_utils  # temporarily borrow this helper
+
+
+
+class GridMorecantile(Grid):
+
+    def __init__(self, identifier):
+
+        super().__init__(identifier)
+
+        self._tms = mct.tms.get(identifier)
+
+        pass
+
+    def tiles(self, features, z_min, z_max=None):
+
+        bound = Grid.bound_by(features)
+
+        polygons = [f for f in super_utils.filter_features(features)]
+
+        zooms = [z_min] if z_max is None else [z for z in range(z_min, z_max + 1)]
+
+        tiles = []
+
+        for z in zooms:
+
+            minimum = self.tile_of(bound[0], bound[3], z)
+            maximum = self.tile_of(bound[2], bound[1], z)
+
+            tile_range = \
+                {
+                    "x": { "min": minimum[0], "max": maximum[0] + 1 },
+                    "y": { "min": minimum[1], "max": maximum[1] + 1 }
+                }
+
+            out_shape = \
+                [
+                    tile_range["y"]["max"] - tile_range["y"]["min"],
+                    tile_range["x"]["max"] - tile_range["x"]["min"]
+                ]
+
+            transform = rasterio.transform.from_bounds \
+                (
+                    bound[0],
+                    bound[1],
+                    bound[2],
+                    bound[3],
+                    out_shape[1],  # width = columns
+                    out_shape[0]   # height = rows
+                )
+
+            shapes = [(geom["geometry"], 255) for geom in polygons]
+
+            burn = rasterio.features.rasterize \
+                (
+                    shapes      = shapes,
+                    out_shape   = out_shape,
+                    transform   = transform,
+                    all_touched = True
+                )
+
+            xys = np.fliplr(np.dstack(np.where(burn))[0])
+
+            xys[:, 0] += tile_range["x"]["min"]
+            xys[:, 1] += tile_range["y"]["min"]
+
+            tiles.extend(np.append(xys, np.zeros((xys.shape[0], 1), dtype=np.uint8) + z, axis=1).tolist())
+
+            pass
+
+        return tiles
+
+    def bound_of(self, x, y, z):
+
+        b = self._tms.xy_bounds(mct.Tile(x=x, y=y, z=z))
+
+        return Bound.RH(b.left, b.bottom, b.right, b.top)
+
+    def tile_of(self, longitude, latitude, zoom):
+
+        tile = self._tms.tile(longitude, latitude, zoom)
+
+        return tile.x, tile.y, tile.z
+
+    pass
+

+ 46 - 0
src/starearth/grid/sm.py

@@ -0,0 +1,46 @@
+from .grid import Grid
+
+import mercantile
+
+from supermercado import burntiles, super_utils
+
+from starearth import Bound
+
+
+class GridSupermercado(Grid):
+
+    def __init__(self, identifier):
+
+        super().__init__(identifier)
+
+        pass
+
+    def tiles(self, features, z_min, z_max=None):
+
+        polygons = [f for f in super_utils.filter_features(features)]
+
+        zooms = [z_min] if z_max is None else [z for z in range(z_min, z_max + 1)]
+
+        tiles = []
+
+        for zoom in zooms:
+
+            tiles.extend([tile.tolist() for tile in burntiles.burn(polygons, zoom)])
+
+            pass
+
+        return tiles
+
+    def bound_of(self, x, y, z):
+
+        bbox = mercantile.xy_bounds(x, y, z)
+
+        return Bound.RH(bbox[0], bbox[1], bbox[2], bbox[3])
+
+    def tile_of(self, longitude, latitude, zoom):
+
+        tile = mercantile.tile(longitude, latitude, zoom)
+
+        return tile.x, tile.y, tile.z
+
+    pass

+ 30 - 0
src/starearth/nodata.py

@@ -0,0 +1,30 @@
+import numpy as np
+from PIL import Image
+
+
+class Nodata:
+
+    def __init__(self, value):
+
+        self._value = value
+
+        pass
+
+    def process(self, image):
+
+        data = np.asarray(image)
+
+        assert len(self._value) == data.shape[2]
+
+        data_size = (data.shape[0], data.shape[1])
+
+        alpha = np.zeros(data_size, data.dtype)
+        # alpha = np.zeros(image.size, data.dtype)
+
+        alpha[np.all(data != self._value, -1)] = 255
+
+        data = np.concatenate((data, np.expand_dims(alpha, axis=-1)), axis=-1)
+
+        return Image.fromarray(data, 'RGBA')
+
+    pass

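A tiny sketch of Nodata adding an alpha channel; note that a pixel becomes opaque only when every band differs from the nodata value:

    import numpy as np
    from PIL import Image

    from starearth.nodata import Nodata

    rgb = np.zeros((2, 2, 3), np.uint8)
    rgb[0, 0] = (200, 120, 80)  # one pixel with no zero band

    rgba = Nodata([0, 0, 0]).process(Image.fromarray(rgb, 'RGB'))
    print(np.asarray(rgba)[..., 3])  # [[255 0] [0 0]] -> only that pixel is opaque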
+ 0 - 0
src/starearth/output/__init__.py


+ 68 - 0
src/starearth/output/bil.py

@@ -0,0 +1,68 @@
+import os
+import numpy as np
+
+from osgeo import gdal
+from starearth.output.output import Output
+
+
+class OutputBIL(Output):
+
+    def __init__(self, filesystem, accessory, nodata=None):
+
+        super().__init__(filesystem, accessory)
+
+        if nodata is None:
+
+            nodata = -9999
+
+        # nodata for bil output defaults to 0
+        # self.nodata = nodata
+        self.nodata = 0
+
+        pass
+
+    def write(self, x, y, z, dict_data, properties=None):
+
+        data   = dict_data['data']
+        _type  = dict_data['datatype']
+        height = dict_data['height']
+        width  = dict_data['width']
+
+        ox = dict_data['ox']
+        oy = dict_data['oy']
+        oh = dict_data['oh']
+        ow = dict_data['ow']
+
+        if self._accessory is None:
+
+            exif = None
+            date = properties.get('date')
+        else:
+
+            exif, date = self._accessory.structure_exif_dict(properties)
+
+            pass
+
+        filename = self._formatter.format(x, y, z, 'bil')
+
+        tiles_path = self._filesystem.path(date).resolve('tiles')
+
+        if not os.path.exists(tiles_path):
+            os.makedirs(tiles_path, exist_ok=True)
+
+        path = self._filesystem.path(date).path('tiles').resolve(filename)
+
+        mem_driver = gdal.GetDriverByName('MEM')
+        mem_ds = mem_driver.Create('mem', width, height, 1, _type.to_gdal())
+        mem_band1 = mem_ds.GetRasterBand(1)
+        mem_band1.Fill(self.nodata)
+        mem_band1.SetNoDataValue(self.nodata)
+        mem_band1.WriteArray(data, ox, oy)
+
+        tile_data = mem_band1.ReadAsArray()
+
+        # tile_data = data.astype(np.int16)
+
+        with open(path, 'wb') as f:
+
+            f.write(tile_data.astype(np.int16).tobytes())

+ 81 - 0
src/starearth/output/jpeg.py

@@ -0,0 +1,81 @@
+import io
+import os
+import traceback
+import numpy as np
+
+from PIL import Image
+from starearth.output.output import Output
+
+
+class OutputJPEG(Output):
+
+    def __init__(self, filesystem, accessory, renderer):
+
+        super().__init__(filesystem, accessory)
+
+        self._renderer = renderer
+
+        pass
+
+    def write(self, x, y, z, dict_data, properties=None):
+
+        data = dict_data['data']
+        datatype = dict_data['datatype']
+        height = dict_data['height']
+        width = dict_data['width']
+        ox = dict_data['ox']
+        oy = dict_data['oy']
+        oh = dict_data['oh']
+        ow = dict_data['ow']
+
+        data, mode = self._renderer.process(data)
+
+        if data.shape[2] == 4:
+
+            data = data[:, :, :3]  # drop the alpha channel for JPEG
+            mode = "RGB"
+
+            pass
+
+        pixels = np.zeros((height, width, 3), np.uint8)  # the dtype is fixed to uint8 after the renderer
+
+        pixels[oy:oy + oh, ox:ox + ow] = data
+
+        image = Image.fromarray(pixels, mode)
+
+        if self._accessory is None:
+
+            exif = None
+            date = properties.get('date')
+        else:
+
+            exif, date = self._accessory.structure_exif_dict(properties)
+
+            pass
+
+        filename = self._formatter.format(x, y, z, 'jpeg')
+
+        # the combine optimization is temporarily disabled
+        # if ox is not None and oy is not None:
+        #
+        #     filename = self._formatter.c_format(x, y, z, ox, oy, 'jpeg')
+
+        tiles_path = self._filesystem.path(date).resolve('tiles')
+        if not os.path.exists(tiles_path):
+            os.makedirs(tiles_path, exist_ok=True)
+
+        path = self._filesystem.path(date).path('tiles').resolve(filename)
+
+        if os.path.exists(path):
+            with open(path, 'rb') as fp:
+                old_bytes = fp.read()
+
+            old_img = Image.open(io.BytesIO(old_bytes)).convert("RGBA")
+
+            image.paste(old_img, (0, 0), old_img)
+
+        if exif:
+            image.save(path, exif=exif)
+
+        else:
+            image.save(path)

+ 42 - 0
src/starearth/output/output.py

@@ -0,0 +1,42 @@
+class Formatter:
+
+    def format(self, x, y, z, ext):
+
+        return '{}_{}_{}.{}'.format(z, y, x, ext)
+
+    def c_format(self, x, y, z, ox, oy, ext):
+
+        return 'c_{}_{}_{}_{}_{}.{}'.format(z, y, x, ox, oy, ext)
+
+    pass
+
+
+class Output:
+
+    def __init__(self, filesystem, accessory, formatter=None):
+
+        self._filesystem = filesystem
+
+        self._formatter = formatter if formatter else Formatter()
+
+        self._accessory = accessory
+
+        pass
+
+    @property
+    def filesystem(self):
+
+        return self._filesystem
+
+    @property
+    def formatter(self):
+
+        return self._formatter
+
+    def write(self, x, y, z, dict_data, properties=None):
+
+        pass
+
+    pass
+
+

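A worked example of the default naming scheme; note that the order inside the file name is z, then y, then x:

    fmt = Formatter()

    print(fmt.format(3, 2, 10, 'png'))            # '10_2_3.png'
    print(fmt.c_format(3, 2, 10, 0, 128, 'png'))  # 'c_10_2_3_0_128.png'
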
+ 70 - 0
src/starearth/output/png.py

@@ -0,0 +1,70 @@
+import io
+import os
+import numpy as np
+from PIL import Image
+from starearth.output.output import Output
+
+
+class OutputPNG(Output):
+
+    def __init__(self, filesystem, accessory, renderer):
+
+        super().__init__(filesystem, accessory)
+
+        self._renderer = renderer
+
+        pass
+
+    def write(self, x, y, z, dict_data, properties=None):
+
+        data = dict_data['data']
+        datatype = dict_data['datatype']
+        height = dict_data['height']
+        width = dict_data['width']
+        ox = dict_data['ox']
+        oy = dict_data['oy']
+        oh = dict_data['oh']
+        ow = dict_data['ow']
+
+        data, mode = self._renderer.process(data)
+
+        channels = 3 if mode == 'RGB' else 4
+
+        pixels = np.zeros((height, width, channels), np.uint8)
+
+        pixels[oy:oy + oh, ox:ox + ow] = data
+
+        image = Image.fromarray(pixels, mode)
+
+        if self._accessory is None:
+
+            exif = None
+            date = properties.get('date')
+
+        else:
+
+            exif, date = self._accessory.structure_exif_dict(properties)
+
+            pass
+
+        filename = self._formatter.format(x, y, z, 'png')
+
+        tiles_path = self._filesystem.path(date).resolve('tiles')
+        if not os.path.exists(tiles_path):
+            os.makedirs(tiles_path, exist_ok=True)
+
+        path = self._filesystem.path(date).path('tiles').resolve(filename)
+
+        if os.path.exists(path):
+            with open(path, 'rb') as fp:
+                old_bytes = fp.read()
+
+            old_img = Image.open(io.BytesIO(old_bytes)).convert("RGBA")
+
+            image.paste(old_img, (0, 0), old_img)
+
+        if exif:
+            image.save(path, exif=exif)
+
+        else:
+            image.save(path)

+ 91 - 0
src/starearth/output/raw.py

@@ -0,0 +1,91 @@
+import os
+import numpy as np
+from osgeo import gdal
+from starearth.datatype import DataType
+from starearth.output.output import Output
+
+
+class OutputRAW(Output):
+
+    def __init__(self, filesystem, accessory, nodata=None):
+
+        super().__init__(filesystem, accessory)
+
+        self._nodata = nodata
+
+        pass
+
+    def write(self, x, y, z, dict_data, properties=None):
+
+        data = dict_data['data']
+        datatype = dict_data['datatype']
+        height = dict_data['height']
+        width = dict_data['width']
+        ox = dict_data['ox']
+        oy = dict_data['oy']
+        oh = dict_data['oh']
+        ow = dict_data['ow']
+
+        if len(data.shape) == 2:
+
+            pixels = np.zeros((height, width), data.dtype)
+
+            pixels[oy:oy + oh, ox:ox + ow] = data
+
+            pass
+
+        else:
+
+            pixels = np.zeros((data.shape[0], height, width), data.dtype)
+
+            pixels[:, oy:oy + oh, ox:ox + ow] = data
+
+            pass
+
+        if self._accessory is None:
+
+            exif = None
+            date = properties.get('date')
+        else:
+
+            exif, date = self._accessory.structure_exif_dict(properties)
+
+            pass
+
+        filename = self._formatter.format(x, y, z, 'tif')
+
+        tiles_path = self._filesystem.path(date).resolve('tiles')
+        if not os.path.exists(tiles_path):
+            os.makedirs(tiles_path, exist_ok=True)
+
+        path = self._filesystem.path(date).path('tiles').resolve(filename)
+
+        if len(pixels.shape) == 3:
+            im_bands, im_height, im_width = pixels.shape
+        else:
+            im_bands, (im_height, im_width) = 1, pixels.shape
+
+        driver = gdal.GetDriverByName("GTiff")
+
+        dataset = driver.Create \
+            (
+                path,
+                im_width,   # gdal's Create takes xsize (width) before ysize (height)
+                im_height,
+                im_bands,
+                DataType.from_numpy(pixels.dtype).to_gdal()
+            )
+
+        dataset.WriteArray(pixels)
+
+        dataset.SetMetadata(properties)
+
+        bands = dataset.RasterCount
+
+        for band in range(bands):
+            if self._nodata._value[band] is not None:
+                dataset.GetRasterBand(band + 1).SetNoDataValue(self._nodata._value[band])
+
+        del dataset
+
+        pass

+ 0 - 0
src/starearth/renderer/__init__.py


+ 79 - 0
src/starearth/renderer/greyscale.py

@@ -0,0 +1,79 @@
+import numpy as np
+
+from starearth import ColorMap
+
+
+class RendererGreyscale:
+
+    def __init__(self, datatype, lut=None, nodata=None):
+
+        if lut is None:
+
+            # If no LUT is specified, fall back to the default (pure black to pure white)
+
+            lut = ColorMap().data()
+
+            pass
+
+        if nodata is not None:
+
+            # Build the alpha channel
+
+            alpha = np.ones(len(lut), np.uint8) * 255
+
+            alpha[nodata] = 0
+
+            # Append the alpha channel to the LUT
+
+            lut = np.concatenate((lut, np.expand_dims(alpha, axis=-1)), axis=-1)
+
+            # The mode becomes RGBA
+
+            self._mode = 'RGBA'
+
+            pass
+
+        else:
+
+            self._mode = 'RGB'
+
+            pass
+
+        self._lut = lut
+
+        if datatype.name == "uint8":
+
+            self._value_range = [0, 255]
+
+            pass
+
+        else:
+
+            # For now, every non-uint8 type is stretched: its min/max map to the two ends of the colormap
+
+            self._value_range = datatype.range
+
+            pass
+
+        self._bias = 0  # added to the pixel values during processing so the overall range stays within [0, R[1] - R[0]]
+
+        if self._value_range[0] < 0.0:
+
+            self._bias = -self._value_range[0]
+
+            pass
+
+        # Cache this factor to avoid recomputing it for every tile
+
+        self._factor = (len(self._lut) - 1) / (self._value_range[1] - self._value_range[0])
+
+        pass
+
+    def process(self, image):
+
+        indices = np.asarray(image + self._bias, int) * self._factor
+
+        return np.take(self._lut, indices.astype(int), 0), self._mode
+
+    pass
+

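The bias/factor arithmetic in RendererGreyscale reduces to a linear map from the value range onto LUT indices; a worked sketch, assuming an int16 range of [-100, 3000] and a 256-entry LUT:

    lo, hi = -100, 3000            # assumed datatype.range
    bias   = -lo                   # 100: shifts the minimum to 0
    factor = (256 - 1) / (hi - lo)

    value = 1450
    print(int((value + bias) * factor))  # 127 -> lut[127], mid-grey with the default LUT
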
+ 98 - 0
src/starearth/renderer/mapbox_rgb.py

@@ -0,0 +1,98 @@
+import numpy as np
+
+
+class _RGB:
+
+    def process(self, image):
+
+        width  = image.shape[1]
+        height = image.shape[0]
+
+        data = np.zeros((height, width, 3), np.uint8)
+
+        for row in range(height):
+
+            for column in range(width):
+
+                pixel = data[row][column]
+
+                v = int((image[row][column] + 10000) * 10)
+
+                pixel[0] = v >> 16 & 0x0000FF
+                pixel[1] = v >>  8 & 0x0000FF
+                pixel[2] = v       & 0x0000FF
+
+                pass
+
+            pass
+
+        return data, 'RGB'
+
+    pass
+
+
+class _RGBA:
+
+    def __init__(self, nodata):
+
+        self._nodata = nodata
+
+        pass
+
+    def process(self, image):
+
+        width  = image.shape[1]
+        height = image.shape[0]
+
+        data = np.zeros((height, width, 4), np.uint8)
+
+        for row in range(height):
+
+            for column in range(width):
+
+                pixel = data[row][column]
+
+                v = int((image[row][column] + 10000) * 10)
+
+                pixel[0] = v >> 16 & 0x0000FF
+                pixel[1] = v >>  8 & 0x0000FF
+                pixel[2] = v       & 0x0000FF
+                # compare the raw sample, not the encoded value, against nodata
+                pixel[3] = 0 if image[row][column] == self._nodata else 255
+
+                pass
+
+            pass
+
+        return data, 'RGBA'
+
+    pass
+
+
+class RendererMapboxRGB:
+
+    def __init__(self, datatype, nodata = None):
+
+        self._datatype = datatype
+
+        self._nodata   = nodata
+
+        if self._nodata is None:
+
+            self._implement = _RGB()
+
+            pass
+
+        else:
+
+            self._implement = _RGBA(nodata)
+
+            pass
+
+        pass
+
+    def process(self, image):
+
+        return self._implement.process(image)
+
+    pass
+

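The (elevation + 10000) * 10 packing above matches the Mapbox Terrain-RGB convention (0.1 m steps); encoding and decoding are exact inverses:

    def encode(elevation):
        v = int((elevation + 10000) * 10)
        return v >> 16 & 0xFF, v >> 8 & 0xFF, v & 0xFF

    def decode(r, g, b):
        return ((r << 16) + (g << 8) + b) / 10.0 - 10000.0

    print(encode(8848.8))      # (2, 224, 72)
    print(decode(2, 224, 72))  # 8848.8

The per-pixel Python loops are correct but slow; the same packing could be vectorized with numpy shifts and masks over whole arrays.
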
+ 57 - 0
src/starearth/renderer/rgb.py

@@ -0,0 +1,57 @@
+import numpy as np
+from starearth import DataType
+
+
+class RendererRGB:
+
+    _datatype: DataType
+
+    def __init__(self, datatype: DataType, channels: list[int] = None, nodata=None):
+
+        self._datatype = datatype
+
+        if channels is None:
+
+            channels = [1, 2, 3]
+
+            pass
+
+        self._channels = channels
+        self._nodata   = nodata
+
+        pass
+
+    def process(self, image):
+
+        data = np.asarray([image[i - 1] for i in self._channels], np.float32)
+
+        # Whatever the input dtype of image, the output is always uint8
+
+        if self._datatype.name != 'uint8':
+
+            if self._datatype.range[0] < 0.0 or 255.0 < self._datatype.range[1]:
+
+                # Only data outside the uint8 value range needs stretching
+
+                data = data * 256.0 / (2 ** self._datatype.bits)
+
+                pass
+
+            pass
+
+        data = np.stack(data, -1).astype(np.uint8)
+
+        if self._nodata is None:
+
+            return data, 'RGB'
+
+        alpha = np.zeros((data.shape[0], data.shape[1]), data.dtype)
+
+        alpha[np.all(data != self._nodata, -1)] = 255
+
+        data = np.concatenate((data, np.expand_dims(alpha, axis=-1)), axis=-1)
+
+        return data, 'RGBA'
+
+    pass
+

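The stretch branch in RendererRGB maps the full dtype range onto uint8 by scaling with 256 / 2 ** bits; a one-line check, assuming a uint16 sample:

    # assumed uint16 scene: a digital number of 12000 lands at grey level 46
    print(int(12000 * 256.0 / 2 ** 16))  # 46
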
+ 322 - 0
src/starearth/scene.py

@@ -0,0 +1,322 @@
+import math
+import os
+from osgeo import gdal, osr
+
+from .bound      import Bound, V2
+from .coordinate import Coordinate
+from .datatype   import DataType
+from .filename   import Filename
+
+
+class Scene:
+
+    _bound:      Bound
+    _coordinate: Coordinate
+    _datatype:   DataType
+
+    def __init__(self):
+
+        self.filename = None
+
+        self._dataset = None
+        self._wkt     = None
+        self._bound      = None
+        self._coordinate = None
+        self._resolution = None
+
+        self._interpretation = None
+
+        self._datatype = None
+        self._nodata   = None
+
+        self._transform = None
+
+        pass
+
+    @property
+    def coordinate(self):
+
+        return self._coordinate
+
+    @property
+    def nodata(self):
+
+        return self._nodata
+
+    @property
+    def interpretation(self):
+
+        return self._interpretation
+
+    @property
+    def dataset(self):
+
+        return self._dataset
+
+    @property
+    def transform(self):
+
+        return self._transform
+
+    @property
+    def projection(self):
+
+        return self._coordinate
+
+    @property
+    def bound(self) -> Bound:
+
+        return self._bound
+
+    @property
+    def resolution(self):
+
+        return self._resolution
+
+    @property
+    def width(self) -> int:
+
+        return self._dataset.RasterXSize
+
+    @property
+    def height(self) -> int:
+
+        return self._dataset.RasterYSize
+
+    @property
+    def bands(self) -> int:
+
+        return self._dataset.RasterCount
+
+    @property
+    def datatype(self):
+
+        return self._datatype
+
+    @staticmethod
+    def information(filename):
+
+        if isinstance(filename, str):
+
+            filename = Filename(filename)
+
+            pass
+
+        dataset: gdal.Dataset = gdal.Open(str(filename), gdal.GA_ReadOnly)
+
+        if dataset is None:
+
+            raise Exception(f"{filename} open failed.")
+
+        return gdal.Info \
+            (
+                dataset,
+                options=gdal.InfoOptions(format="json")
+            )
+
+    def load(self, filename):
+
+        if isinstance(filename, str):
+
+            filename = Filename(filename)
+
+            pass
+
+        dataset: gdal.Dataset = gdal.Open(str(filename), gdal.GA_ReadOnly)
+
+        if dataset is None:
+
+            raise Exception(f"{filename} open failed.")
+
+        # Prefer the projection given as a parameter;
+        # if none was given, use the one embedded in the file;
+        # if the file has none either, fall back to a default value
+
+        self._coordinate = Coordinate.from_wkt(dataset.GetProjection())
+
+        self._init(dataset)
+
+        self.filename = filename
+
+        self._dataset = dataset
+
+        pass
+
+    def project(self, coordinate):
+
+        if coordinate == self._coordinate:
+
+            return
+
+        dataset = self._dataset
+
+        dataset = Coordinate.warp(dataset, coordinate)
+
+        self._coordinate = coordinate
+
+        self._init(dataset)
+
+        self._dataset = dataset
+
+        pass
+
+    def build_overviews(self):
+
+        self._dataset.BuildOverviews("AVERAGE", [2, 4, 8, 16, 32])
+
+        pass
+
+    def sample(self, bound: Bound, width: int, height: int, span: tuple[int, int] = None):
+
+        """
+        在场景中重采样并返回结果。
+
+        :param bound:  经纬度边界
+        :param width:  期望输出的像素宽度
+        :param height: 期望输出的像素高度
+        :param span:   输出像素扩展尺寸(目前仅满足来自 DEM 切片需求,128x128 向右下方扩展至 129x129)
+        :return:       采样后的波段数据以及其相对瓦片坐标的位置信息;
+                       当边界与场景没有交集时,返回 None。
+        """
+
+        # Position of the lower-left corner
+
+        tx = (bound.left   - self.bound.left  ) / self.resolution.x
+        ty = (bound.bottom - self.bound.bottom) / self.resolution.y
+
+        # Size of the full extent in pixels
+
+        tw = bound.width  / self.resolution.x
+        th = bound.height / self.resolution.y
+
+        # Both image and tile georeferencing are right-handed; pixel operations need a left-handed system.
+        # bi is the input tif, bt is the bound to cut out; when they only partially overlap, cut just the overlap.
+
+        bi = Bound.LH_XYWH(0.0,                   0.0, float(self.width), float(self.height))
+        bt = Bound.LH_XYWH( tx, self.height - th - ty,                tw,                 th)  # flip the Y axis
+
+        # Intersect the two bounds
+
+        bp = bi.intersect(bt)
+
+        if bp is None:
+
+            return None
+
+        # Map the overlap into output pixel space
+
+        rx = bt.width  / width
+        ry = bt.height / height
+
+        x = int(bp.x)
+        y = int(bp.y)
+
+        ox = int(-bt.x / rx) if bt.x < 0.0 else 0
+        oy = int(-bt.y / ry) if bt.y < 0.0 else 0
+
+        if span:
+
+            width  += span[0]
+            height += span[1]
+
+            w = int(bp.width  + span[0] * rx)
+            h = int(bp.height + span[1] * ry)
+
+            ow = int(math.ceil(bp.width  / rx)) + span[0]
+            oh = int(math.ceil(bp.height / ry)) + span[1]
+
+            pass
+
+        else:
+
+            w = int(bp.width )
+            h = int(bp.height)
+
+            ow = int(math.ceil(bp.width  / rx))
+            oh = int(math.ceil(bp.height / ry))
+
+            pass
+
+        # Clamp reads and offsets that would run past the edges
+
+        w = (self.width  - x) if (x + w) > self.width  else w
+        h = (self.height - y) if (y + h) > self.height else h
+
+        ox = (width  - ow) if (ox + ow) > width  else ox
+        oy = (height - oh) if (oy + oh) > height else oy
+
+        # Resample
+
+        data = self._dataset.ReadAsArray(x, y, w, h, None, ow, oh)
+
+        return \
+            {
+                'data': data,
+                'datatype': self._datatype,
+                'width': width,
+                'height': height,
+                'ox': ox,
+                'oy': oy,
+                'ow': ow,
+                'oh': oh
+            }
+
+    def _init(self, dataset):
+
+        info = gdal.Info \
+                (
+                dataset,
+                options=gdal.InfoOptions(format="json")
+            )
+
+        width  = dataset.RasterXSize
+        height = dataset.RasterYSize
+
+        self._transform = dataset.GetGeoTransform()
+
+        self._bound = Bound.from_transform(width, height, self._transform)
+
+        self._resolution = V2(self._transform[1], -self._transform[5])
+
+        band = dataset.GetRasterBand(1)
+
+        bands = info["bands"]
+
+        if band.DataType == gdal.GDT_Byte:
+
+            min_max = (0, 255)
+
+        elif "min" in bands[0] and "max" in bands[0]:
+
+            min_max = (bands[0]["min"], bands[0]["max"])
+
+            pass
+
+        else:
+
+            min_max = band.ComputeRasterMinMax()
+
+            pass
+
+        self._datatype = DataType.from_gdal(band.DataType, min_max)
+
+        self._interpretation = \
+            [
+                {
+                    "band": band,
+                    "type": self.datatype.name,
+                    "colorInterpretation": gdal.GetColorInterpretationName(dataset.GetRasterBand(band).GetColorInterpretation()),
+                    "noDataValue": dataset.GetRasterBand(band).GetNoDataValue()
+                }
+                for band in range(1, dataset.RasterCount + 1)
+            ]
+
+        self._nodata = \
+            [
+                dataset.GetRasterBand(index).GetNoDataValue()
+                for index in range(1, dataset.RasterCount + 1)
+            ]
+
+        pass
+
+    pass

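A minimal end-to-end sketch (the GeoTIFF path is hypothetical): sampling the scene's own bound squeezes the full extent into a single 256x256 tile, which makes a quick smoke test.

    scene = Scene()
    scene.load('/data/example.tif')               # hypothetical file

    result = scene.sample(scene.bound, 256, 256)  # full extent -> one tile

    if result is not None:
        print(result['data'].shape)               # e.g. (bands, 256, 256)
        print(result['ox'], result['oy'], result['ow'], result['oh'])
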
+ 79 - 0
src/starearth/sheet.py

@@ -0,0 +1,79 @@
+import json
+
+
+class Sheet:
+
+    def __init__(self):
+
+        self.features = None
+
+        self.date = None
+
+        pass
+
+    @staticmethod
+    def parse(data):
+
+        features = []
+
+        properties = data['properties']
+
+        date = properties['date']
+        date = 'default' if date is None else date
+
+        geometry = data['geometry']
+
+        if geometry['type'] == 'MultiPolygon':
+
+            for coordinates in geometry['coordinates']:
+
+                feature = \
+                    {
+                        'type':       'Feature',
+                        'properties': properties,
+                        'geometry':
+                            {
+                                'type':        'Polygon',
+                                'coordinates': coordinates
+                            }
+                    }
+
+                features.append(feature)
+
+                pass
+
+            pass
+
+        elif geometry['type'] == 'Polygon':
+
+            features.append(data)
+            pass
+
+        instance = Sheet()
+
+        instance.date     = date
+        instance.features = features
+
+        return instance
+
+    @staticmethod
+    def parse_geojsonl(filename):
+
+        sheets = []
+
+        with open(filename) as stream:
+
+            for line in stream.readlines():
+
+                data = json.loads(line.strip())
+
+                sheets.append(Sheet.parse(data))
+
+                pass
+
+            pass
+
+        return sheets
+
+    pass
+

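A quick sketch of Sheet.parse on a single geojsonl record (the data is illustrative); a MultiPolygon record would instead be split into one Feature per polygon:

    import json

    line = ('{"type": "Feature",'
            ' "properties": {"date": "2020-01-01"},'
            ' "geometry": {"type": "Polygon",'
            ' "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 0]]]}}')

    sheet = Sheet.parse(json.loads(line))

    print(sheet.date)           # '2020-01-01'
    print(len(sheet.features))  # 1
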
+ 204 - 0
src/starearth/shp_to_geojsonl.py

@@ -0,0 +1,204 @@
+import copy
+import json
+import os
+import shapefile
+import shapely.geometry
+from geojson import Feature
+from datetime import datetime, date
+from osgeo import gdal
+from app.defines import MISSION_TYPE, ONE_E_MINUS4, ONE_E_MINUS6
+
+
+class ComplexEncoder(json.JSONEncoder):
+
+    def default(self, obj):
+
+        if isinstance(obj, datetime):
+            return obj.strftime('%Y-%m-%d %H:%M:%S')
+
+        elif isinstance(obj, date):
+            return obj.strftime("%Y-%m-%d")
+
+        else:
+            return json.JSONEncoder.default(self, obj)
+
+
+def convert_date_time(date_time: str):
+    """Normalize a datetime string to the '%Y-%m-%d %H:%M:%S' format
+    and a date string to '%Y-%m-%d'.
+    """
+    date_time_len = len(date_time)
+
+    # "2014-02-15"
+    if date_time_len == 10:
+        _date = date_time
+        date_time = "{} 00:00:00".format(date_time)
+
+    # "2014-02-15 00:00:00"
+    elif date_time_len == 19:
+        _date = date_time.split(' ')[0]
+
+    else:
+        _date = date_time
+
+    return _date, date_time
+
+
+def shp_to_geojsonl(field_list, filesystem):
+    
+    sf_type_mapping = {
+        3: 'LineString',
+        5: 'Polygon',
+    }
+
+    # Turn the user's manual input into k:v pairs stored in the geojsonl properties
+    for field_list_one in field_list:
+
+        path_list = field_list_one['path_list']
+        field_dict = field_list_one['field_dict']
+
+        for k, v in field_dict.items():
+
+            if not isinstance(v, dict):
+                continue
+
+            # type 'constant' means the user typed the value in by hand
+            # type 'field' means the value comes from a named field of the shp data
+            if v['type'] == 'constant':
+                field_dict[k] = v['value']
+
+            pass
+
+        for shp_relative_path in path_list:
+            filename_shp = os.path.basename(shp_relative_path)
+            filename, ext = os.path.splitext(filename_shp)
+
+            # Build the shp file path and the matching geojsonl path
+            shp_file_path = str(filesystem.path(shp_relative_path))
+            geojsonl_file_path = shp_file_path.replace('.shp', '.geojsonl')
+
+            # Read the shapefile
+            sf = shapefile.Reader(shp_file_path)
+            sf_type = sf_type_mapping[sf.shapeType]
+
+            borders = sf.shapes()
+            shp_fields = sf.fields
+            shp_records = sf.records()
+
+            # Map every shp field to its record index: key = field name, value = index
+            shp_fields_index_dic = {}
+
+            minus_num = 0
+
+            for index, field in enumerate(shp_fields):
+
+                if isinstance(field, tuple):
+                    minus_num += 1
+                    continue
+
+                shp_fields_index_dic[str(field[0])] = index - minus_num
+
+            all_features = []
+
+            # A tif named after the shp file must exist; without it we cannot continue
+            abs_path = os.path.dirname(shp_file_path)
+
+            shpfilename = os.path.basename(shp_file_path)
+            name, ext = os.path.splitext(shpfilename)
+
+            # Look for the tif sharing the shp's base name
+            for _file in os.listdir(abs_path):
+
+                tif_path = os.path.join(abs_path, _file)
+
+                if not os.path.isfile(tif_path):
+                    continue
+
+                _name, _ext = os.path.splitext(_file)
+
+                if _ext.lower() not in ['.tif', '.tiff', ] or _name != name:
+                    continue
+
+                break
+
+            else:
+                # the loop never broke: no matching tif was found and we cannot continue
+                raise FileNotFoundError('no GeoTIFF named {} found beside {}'.format(name, shp_file_path))
+
+            ds = gdal.Open(tif_path)
+            res = float(ds.GetGeoTransform()[1])
+
+            # For each user-specified field name, pull the value out of the shp data into the geojsonl properties
+            for record_index, record in enumerate(shp_records):
+
+                properties = copy.deepcopy(field_dict)
+
+                for k, v in properties.items():
+
+                    if not isinstance(v, dict):
+                        continue
+
+                    # Use the user-specified field name to locate the field in the shp data and read this feature's value
+                    properties[k] = record[shp_fields_index_dic[v['value']]]
+
+                # Normalize the datetime, date and item_id fields
+                _date, date_time = convert_date_time(str(properties['datetime']).strip())
+                properties['date'] = _date
+                properties['datetime'] = date_time
+                properties['item_id'] = filename
+
+                # Per the user's sample data, Mr. Li (李老师) specified: off_nadir and sun_elev go into bk1 joined with a colon, accuracy into bk2, cloudcover into bk3
+                off_nadir_index = shp_fields_index_dic.get('off_nadir', '')
+                sun_elev_index = shp_fields_index_dic.get('sun_elev', '')
+                accuracy_index = shp_fields_index_dic.get('accuracy', '')
+                cloudcover_index = shp_fields_index_dic.get('cloudcover', '')
+
+                bk1_data = ''
+                bk2_data = ''
+                bk3_data = ''
+
+                if isinstance(off_nadir_index, int):
+                    bk1_data += '{}:'.format(record[off_nadir_index])
+
+                if isinstance(sun_elev_index, int):
+                    bk1_data += ':{}'.format(record[sun_elev_index])
+                    bk1_data = bk1_data.replace('::', ':')
+
+                if isinstance(accuracy_index, int):
+                    bk2_data = record[accuracy_index]
+
+                if isinstance(cloudcover_index, int):
+                    bk3_data = record[cloudcover_index]
+
+                properties['bk1'] = bk1_data
+                properties['bk2'] = bk2_data
+                properties['bk3'] = bk3_data
+
+                # if 'disp' not in properties:
+                properties['disp'] = 1
+
+                # if 'res' not in properties:
+                properties['res'] = res
+
+                coordinates = [list(li) for li in borders[record_index].points]
+
+                geometry = {
+                    'type': sf_type,
+                    'coordinates': [coordinates, ],
+                }
+
+                # Simplify the geometry
+                origin_geom = shapely.geometry.shape(geometry)  # GeoJSON -> shapely geometry object
+                simplified_geom = origin_geom.simplify(ONE_E_MINUS4)
+
+                # Wrap the geometry in a Feature
+                feature = Feature(geometry=simplified_geom)
+                feature["properties"] = properties
+
+                all_features.append(feature)
+
+            # Write every feature record of the current geojsonl
+            with open(geojsonl_file_path, 'w') as fb:
+
+                for feature in all_features:
+
+                    json.dump(feature, fb, cls=ComplexEncoder)
+                    fb.write('\n')
+
+    pass

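For reference, a sketch of the field_list structure shp_to_geojsonl expects, inferred from the loops above (all names are illustrative):

    field_list = [
        {
            'path_list': ['vectors/area1.shp'],  # shp paths relative to the filesystem root
            'field_dict': {
                'datetime': {'type': 'field',    'value': 'acq_date'},   # read from the shp attribute table
                'owner':    {'type': 'constant', 'value': 'starearth'},  # typed in by the user, stored as-is
            },
        },
    ]
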
+ 319 - 0
src/starearth/slicer.py

@@ -0,0 +1,319 @@
+import math
+import multiprocessing as mp
+import numpy           as np
+
+from PIL         import Image
+
+from .coordinate import EPSG, Coordinate
+from .grid       import Grid, GridFactory
+from .scene      import Scene
+
+
+
+def process_sample(filename, grid, tiles, square, output, cache, date, properties, total_num, epsg, coordinate=None, span=None):
+
+    from .scene import Scene
+
+    scene = Scene()
+
+    scene.load(filename)
+
+    if coordinate:
+
+        scene.project(coordinate)
+
+        pass
+
+    grid = GridFactory.create(grid)
+
+    increment = 1 / total_num
+
+    # slice_each = Slice_Each()
+    # for x, y, z in slice_each(tiles, increment):
+    for x, y, z in tiles:
+
+        bound = grid.bound_of(x, y, z)
+
+        dict_data = scene.sample(bound, square, square, span)
+
+        output.write(x, y, z, dict_data, properties)
+
+        pass
+
+    pass
+
+
+def process_combine(filename, tiler, tiles, square, output, cache, date, total_num):
+    # Create a single blank base tile
+    blank = Image.fromarray(np.zeros((square, square, 3), np.uint8), 'RGB')
+
+    for x, y, z in tiles:
+        # Start a fresh canvas
+        canvas = Image.fromarray(np.zeros((square * 2, square * 2, 3), np.uint8), 'RGB')
+
+        tz = z + 1
+
+        # The z-level tile at (x, y) merges four z+1 tiles; compute their tx/ty values
+        tx0 = x * 2
+        tx1 = tx0 + 1
+        ty0 = y * 2
+        ty1 = ty0 + 1
+
+        im_00, dict_00 = cache.take(tx0, ty0, tz, blank, date)
+        im_10, dict_10 = cache.take(tx1, ty0, tz, blank, date)
+        im_01, dict_01 = cache.take(tx0, ty1, tz, blank, date)
+        im_11, dict_11 = cache.take(tx1, ty1, tz, blank, date)
+
+        # Paste the four z+1 images into the top-left, top-right, bottom-left and bottom-right quadrants
+        canvas.paste(im_00, (0 + dict_00['ox'], 0 + dict_00['oy']))
+        canvas.paste(im_10, (square + dict_10['ox'], 0 + dict_10['oy']))
+        canvas.paste(im_01, (0 + dict_01['ox'], square + dict_01['oy']))
+        canvas.paste(im_11, (square + dict_11['ox'], square + dict_11['oy']))
+
+        ## canvas.paste(im_00, (0, 0))
+        ## canvas.paste(im_01, (square, 0))
+        ## canvas.paste(im_10, (0, square))
+        ## canvas.paste(im_11, (square, square))
+
+        image = canvas.resize((square, square), Image.BOX)
+
+        cache.put(x, y, z, image)
+
+        output.write(x, y, z, image)
+
+        # # Get the width/height of the canvas's minimal bounding box
+
+        # width_00, height_00 = im_00.size[0], im_00.size[-1]
+        # width_01, height_01 = im_01.size[0], im_01.size[-1]
+        # width_10, height_10 = im_10.size[0], im_10.size[-1]
+        # width_11, height_11 = im_11.size[0], im_11.size[-1]
+
+        # width_00_10 = width_00 + width_10
+        # width_01_11 = width_01 + width_11
+        #
+        # # canvas_width=width_00_10
+        # canvas_width=max( width_00_10, width_01_11)
+        #
+        # height_00_01 = height_00 + height_01
+        # height_10_11 = height_10 + height_11
+        #
+        # # canvas_height = height_00_01
+        # canvas_height = max(height_00_01, height_10_11)
+        #
+        # canvas_min = Image.fromarray(np.zeros((canvas_width, canvas_height, 3), np.uint8), 'RGB')
+        #
+        # # Paste the four z+1 images into the four quadrants (the bottom-right position depends on the sizes of the other three)
+        # if im_00:
+        #     canvas_min.paste(im_00, (0, 0))
+        #     if im_10:
+        #         canvas_min.paste(im_10, (width_00, 0))
+        #     if im_01:
+        #         canvas_min.paste(im_01, (0, height_00))
+        #     if im_11:
+        #         # canvas_min.paste(im_11, (width_00, height_00))
+        #         canvas_min.paste(im_11, (max(width_00, width_10), max(height_00, height_01)))
+        #
+        #     # Store a standard-size image based on the top-left tile (black seams may remain; composite first, then insert)
+        #     canvas.paste(canvas_min, (0 + dict_00['ox'], 0 + dict_00['oy']))
+        # else:
+        #     if im_01:
+        #         canvas_min.paste(im_01, (0, 0))
+        #         # canvas_min.paste(im_11, (width_00, height_00))
+        #         if im_11:
+        #             canvas_min.paste(im_11, (width_10, 0))
+        #
+        #             # Store a standard-size image based on the top-left tile (black seams may remain; composite first, then insert)
+        #             canvas.paste(canvas_min, (square + dict_01['ox'], 0 + dict_01['oy']))
+        #
+        # # # Store a standard-size image based on the top-left tile (black seams may remain; composite first, then insert)
+        # # canvas.paste(canvas_min, (0 + dict_00['ox'], 0 + dict_00['oy']))
+        #
+        # image = canvas.resize((square, square), Image.BOX)
+        #
+        # cache.put(x, y, z, image)
+        #
+        # output.write(x, y, z, image)
+
+        # image = canvas_min.resize((math.ceil(canvas_width/2), math.ceil(canvas_height/2)), Image.BOX)
+        #
+        # # If the downscaled image differs from the standard size, record it for the later 4-in-1 merge
+        # if image.height != square or image.width != square:
+        #
+        #     cache.put(x, y, z, image)
+        #
+        #     output.write(x, y, z, image, ox=dict_00['ox'], oy=dict_00['oy'])
+
+        pass
+
+    pass
+
+
+def cbe(exception):
+
+    # error callback for the multiprocessing pool: just log the exception
+    print(exception)
+
+    pass
+
+
+class Slicer:
+
+    _scene: Scene
+    _grid:  Grid
+
+    def __init__(self, scene: Scene, square: int, output, grid: Grid):
+
+        self._scene  = scene
+        self._square = square
+        self._output = output
+        self._grid   = grid
+
+
+    def slice(self, z_min: int, z_max: int, epsg, sheets=None, span=None, coordinate=None):
+
+        assert z_min <= z_max
+
+        # count = mp.cpu_count()
+        count = math.ceil(mp.cpu_count() / 2)  # use half the cores
+
+        with mp.Manager() as manager:
+
+            cache = None
+
+            if sheets:
+
+                for sheet in sheets:
+
+                    # bound = Grid.bound_by(sheet.features)
+                    feature = sheet.features
+
+                    properties = sheet.features[0]['properties']
+
+                    # tiles = self._grid.tiles(bound, z_min, z_max)
+                    tiles = self._grid.tiles(feature, z_min, z_max)
+
+                    total_num = len(tiles)
+
+                    # Start from the deepest level and work upward, slicing (merging) with multiple processes
+
+                    for z in np.flipud(range(z_min, z_max + 1)):
+
+                        self._run_mp(cache, z, process_sample, count, sheet.date, feature, properties, total_num, epsg, coordinate, span)
+
+            else:
+
+                bound = self._scene.bound.value
+
+                properties = {}
+                tiles = self._grid.tiles(bound, z_min, z_max)
+                total_num = len(tiles)
+
+                for z in np.flipud(range(z_min, z_max + 1)):
+
+                    self._run_mp(cache, z, process_sample, count, 'default', bound, properties, total_num, epsg, coordinate, span)
+
+    def _run_mp(self, cache, z, function, processes, date, feature, properties, total_num, epsg, coordinate, span):
+
+        tiles = self._grid.tiles(feature, z)
+
+        areas = np.array_split(tiles, processes)
+
+        pool = mp.Pool(processes)
+
+        for area in areas:
+
+            if len(area) == 0:
+
+                continue
+                pass
+
+            pool.apply_async \
+                (
+                    function,
+                    (
+                        self._scene.filename,
+                        self._grid.identifier,  # Grid objects cannot be pickled, so pass the identifier instead
+                        area,
+                        self._square,
+                        self._output,
+                        cache,
+                        date,
+                        properties,
+                        total_num,
+                        epsg,
+                        coordinate,
+                        span
+                    ),
+                    error_callback=cbe
+                )
+
+            pass
+
+        pool.close()
+        pool.join()
+
+        pass
+
+    def _slice_sp(self, z_min, z_max, epsg, sheets=None, span=None, coordinate=None):
+
+        # Single-process slicing, for debugging only
+
+        assert z_min <= z_max
+
+        cache = None
+
+        if sheets:
+
+            for sheet in sheets:
+
+                bound = Grid.bound_by(sheet.features)
+
+                for z in np.flipud(range(z_min, z_max + 1)):
+
+                    tiles = self._grid.tiles(bound, z)
+
+                    areas = np.array_split(tiles, 1)
+
+                    for area in areas:
+
+                        process_sample \
+                            (
+                                self._scene.filename,
+                                self._grid.identifier,
+                                area,
+                                self._square,
+                                self._output,
+                                cache,
+                                sheet.date,
+                                None,
+                                1,
+                                epsg,
+                                coordinate,
+                                span,
+                            )
+
+
+        else:
+
+            bound = self._scene.bound.value
+
+            for z in np.flipud(range(z_min, z_max + 1)):
+
+                tiles = self._grid.tiles(bound, z)
+
+                areas = np.array_split(tiles, 1)
+
+                for area in areas:
+
+                    process_sample \
+                        (
+                            self._scene.filename,
+                            self._grid.identifier,
+                            area,
+                            self._square,
+                            self._output,
+                            cache,
+                            'default',
+                            None,
+                            1,
+                            epsg
+                        )

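The tx/ty arithmetic in process_combine follows the usual quadtree doubling: for any tile, the four children one zoom level down are

    x, y, z = 5, 7, 10
    children = [(2 * x + dx, 2 * y + dy, z + 1) for dy in (0, 1) for dx in (0, 1)]
    print(children)  # [(10, 14, 11), (11, 14, 11), (10, 15, 11), (11, 15, 11)]
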
+ 0 - 0
src/starearth/storage/__init__.py


+ 208 - 0
src/starearth/storage/arcgis_zyx.py

@@ -0,0 +1,208 @@
+import os
+import json
+import shutil
+from PIL import Image
+from starearth.utils.tileset import TileSet
+from tilecloud import TileStore, Tile, TileCoord
+
+
+class StorageARCZYX:
+    def __init__(self, tiles_path, min_zoom, max_zoom, merging, output_path, tileset):
+        self.tiles_path          = tiles_path
+        self.min_zoom            = min_zoom
+        self.max_zoom            = max_zoom
+        self.merging             = merging
+        self.output_path         = output_path
+        self.tileset_id          = tileset.get('id', '')
+        self.tileset_name        = tileset.get('name', '')
+        self.tileset_owner       = tileset.get('owner', '')
+        self.tileset_tile_grid   = tileset.get('tile_grid', '')
+        self.tileset_attribution = tileset.get('attribution', '')
+        self.tileset_description = tileset.get('description', '')
+
+    def storage(self):
+        tile_set             = TileSet()
+        tile_set.id          = self.tileset_id
+        tile_set.minzoom     = self.min_zoom
+        tile_set.maxzoom     = self.max_zoom
+        tile_set.name        = self.tileset_name
+        tile_set.owner       = self.tileset_owner
+        tile_set.tile_grid   = self.tileset_tile_grid
+        tile_set.attribution = self.tileset_attribution
+        tile_set.description = self.tileset_description
+
+        tile_set.bounds = []
+        msmt_list = list()
+        date_list = []
+
+        # Process the tiles under the slicing directory
+        # for tile_dir in os.listdir(self.tiles_path):
+        #
+        #     abs_tiles_path = os.path.join(self.tiles_path, tile_dir)
+
+        abs_tiles_path = self.tiles_path
+
+        for tile_date in os.listdir(abs_tiles_path):
+
+            date_dir_path = os.path.join(abs_tiles_path, tile_date)
+
+            if not os.path.isdir(date_dir_path):
+                continue
+
+            meta_json_path = os.path.join(date_dir_path, 'meta.json')
+            imagesheet_geojsonl_path = os.path.join(date_dir_path, 'imagesheet.geojsonl')
+            tile_list_path = os.path.join(date_dir_path, 'tiles_list.json')
+
+            with open(meta_json_path, 'r') as fb:
+                data = json.load(fb)
+                bbox = data['bbox']
+                date_meta_min_z = int(data['min_z'])
+                date_meta_max_z = int(data['max_z'])
+                tile_format = data['tileset']['tile_format']
+
+            if tile_set.bounds == []:
+                tile_set.bounds = bbox
+
+            else:
+                _min_lon = min(tile_set.bounds[0], bbox[0])
+                _min_lat = min(tile_set.bounds[1], bbox[1])
+                _max_lon = max(tile_set.bounds[2], bbox[2])  # union of bounds, so the maxima use max()
+                _max_lat = max(tile_set.bounds[3], bbox[3])
+
+                tile_set.bounds = [_min_lon, _min_lat, _max_lon, _max_lat]
+
+                pass
+            # Load the tile list
+            with open(tile_list_path, 'r') as fb:
+                tile_list = json.load(fb)
+            # Build the source and destination tile path templates
+            local_tiles_src = "file://{}/tiles/%(z)d_%(y)d_%(x)d.{}".format(date_dir_path, tile_format)
+            local_tiles_dst = "file://{}/tiles/%(z)d/%(y)d/%(x)d.{}".format(os.path.join(self.output_path, tile_date), tile_format)
+
+            ts_input = TileStore.load(local_tiles_src)
+            ts_output = TileStore.load(local_tiles_dst)
+
+            # Build Tile objects from the tile list
+            tiles = [Tile(TileCoord(z, x, y)) for x, y, z in tile_list]
+
+            for tile in tiles:
+                tile = ts_input.get_one(tile)
+
+                if tile is None:
+                    continue
+
+                if tile in ts_output:
+                    if self.merging == 0:
+                        # 仅保留原瓦片
+                        continue
+
+                    elif self.merging == 1:
+                        # 替换原瓦片
+                        ts_output.put_one(tile)
+
+                    elif self.merging == 2:
+                        # 合并同名瓦片(原瓦片在上)
+                        z, x, y = [int(i) for i in str(tile.tilecoord).split('/')]
+
+                        origin_tile_path = local_tiles_dst.replace('file://', '') % {'z': z, 'y': y, 'x': x}
+                        new_tile_path    = local_tiles_src.replace('file://', '') % {'z': z, 'y': y, 'x': x}
+
+                        origin_img = Image.open(origin_tile_path)
+                        new_img = Image.open(new_tile_path)
+
+                        new_img.paste(origin_img, (0, 0), origin_img)
+
+                        new_img.save(origin_tile_path)
+
+                        pass
+
+                    elif self.merging == 3:
+                        # merge same-named tiles (existing tile underneath)
+
+                        z, x, y = [int(i) for i in str(tile.tilecoord).split('/')]
+
+                        origin_tile_path = local_tiles_dst.replace('file://', '') % {'z': z, 'y': y, 'x': x}
+                        new_tile_path    = local_tiles_src.replace('file://', '') % {'z': z, 'y': y, 'x': x}
+
+                        origin_img = Image.open(origin_tile_path)
+                        new_img = Image.open(new_tile_path)
+
+                        origin_img.paste(new_img, (0, 0), new_img)
+
+                        origin_img.save(origin_tile_path)
+
+                        pass
+
+                else:
+                    # store the tile
+                    ts_output.put_one(tile)
+
+            if not os.path.exists(os.path.join(self.output_path, tile_date)):
+                os.makedirs(os.path.join(self.output_path, tile_date), exist_ok=True)
+
+            # Merge meta.json
+            dst_meta_path = os.path.join(self.output_path, tile_date, 'meta.json')
+            if os.path.exists(dst_meta_path):
+                with open(dst_meta_path, 'r') as fb:
+                    dst_meta_data = json.load(fb)
+                    dst_meta_bbox = dst_meta_data['bbox']
+                    dst_meta_min_z = int(dst_meta_data['min_z'])
+                    dst_meta_max_z = int(dst_meta_data['max_z'])
+
+                dst_meta_min_z = min(date_meta_min_z, dst_meta_min_z)
+                dst_meta_max_z = max(date_meta_max_z, dst_meta_max_z)
+
+                left = min(dst_meta_bbox[0], bbox[0])
+                bottom = min(dst_meta_bbox[1], bbox[1])
+                right = max(dst_meta_bbox[2], bbox[2])
+                top = max(dst_meta_bbox[3], bbox[3])
+
+                dst_meta_data['bbox'] = [left, bottom, right, top]
+
+                dst_meta_data['min_z'] = dst_meta_min_z
+                dst_meta_data['max_z'] = dst_meta_max_z
+
+                dst_meta_data['geometry']['coordinates'] = [
+                    [[left, top], [right, top], [right, bottom], [left, bottom], [left, top]]]
+
+                with open(dst_meta_path, 'w') as fb:
+                    json.dump(dst_meta_data, fb)
+
+            else:
+                shutil.copy(meta_json_path, os.path.join(self.output_path, tile_date))
+
+            # Merge imagesheet.geojsonl
+            dst_imagesheet_path = os.path.join(self.output_path, tile_date, 'imagesheet.geojsonl')
+            if os.path.exists(dst_imagesheet_path):
+
+                # geojsonl is line-delimited JSON, so append the new lines as plain text
+                with open(imagesheet_geojsonl_path, 'r') as fb:
+                    new_imagesheet_data = fb.read()
+
+                if not new_imagesheet_data.endswith('\n'):
+                    new_imagesheet_data += '\n'
+
+                with open(dst_imagesheet_path, 'a+') as fb:
+                    fb.write(new_imagesheet_data)
+            else:
+                shutil.copy(imagesheet_geojsonl_path, os.path.join(self.output_path, tile_date))
+
+            msmtdate = {
+                tile_date: {
+                    "storage_type": "ARC_ZYX",
+                    "path": "ARC_ZYX://{}/{}".format(self.output_path, tile_date)
+                }
+            }
+
+            if tile_date not in date_list:
+                msmt_list.append(msmtdate)
+                date_list.append(tile_date)
+
+        # Center point in lon/lat
+        tile_set.center = [
+            (tile_set.bounds[0] + tile_set.bounds[2]) / 2,
+            (tile_set.bounds[1] + tile_set.bounds[3]) / 2,
+            self.min_zoom
+        ]
+
+        tile_set.msmt_info = msmt_list
+
+        tile_set.save(path=self.output_path)
+
+        pass

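The four merging modes in StorageARCZYX reduce to Pillow paste order; a sketch of modes 2 and 3 (the file names are placeholders, and both tiles are assumed to carry an alpha channel):

    from PIL import Image

    origin = Image.open('old_tile.png').convert('RGBA')  # tile already at the destination
    new    = Image.open('new_tile.png').convert('RGBA')  # freshly sliced tile

    # merging == 2: the existing tile stays on top
    merged = new.copy()
    merged.paste(origin, (0, 0), origin)

    # merging == 3: the existing tile goes underneath
    merged = origin.copy()
    merged.paste(new, (0, 0), new)
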
+ 462 - 0
src/starearth/storage/fast_dfs.py

@@ -0,0 +1,462 @@
+"""
+
+使用 FastDFS 进行文件的存储/读取
+
+目前的 fdfs_client 限制:
+
+1. tracker / storage 无法运行于同一个 docker compose 环境
+2. 没有文件系统概念,只能依赖第三方(Redis)的键值映射模拟文件系统
+3. 自带 metadata 记录机制需要进行二次通讯,不方便也不安全
+
+"""
+
+
+import json
+import os
+
+from enum import Enum
+
+import redis
+
+from fdfs_client.client import get_tracker_conf, Fdfs_client
+
+
+class FSNode:
+
+    class Type(Enum):
+
+        Directory = 1
+        File      = 2
+
+        pass
+
+    def __init__(self, type, name):
+
+        self._type     = type
+        self._name     = name
+        self._children = {}
+
+        pass
+
+    def __contains__(self, name):
+
+        return name in self._children
+
+    def __str__(self):
+
+        return "\n".join(self._to_string_lines(""))
+
+    @property
+    def type(self):
+
+        return self._type
+
+    @property
+    def name(self):
+
+        return self._name
+
+    @property
+    def children(self):
+
+        return self._children
+
+    def add(self, node):
+
+        self._children[FSNode._key(node)] = node
+
+        return node
+
+    def get(self, name):
+
+        return self._children.get(name)
+
+    def _to_string_lines(self, indent=""):
+
+        data = [f"{indent}{self.name}"]
+
+        indent += "  "
+
+        if self._type == FSNode.Type.Directory:
+
+            directories = []
+            files       = []
+
+            for name, node in self._children.items():
+
+                if node.type == FSNode.Type.Directory:
+
+                    directories.append(name)
+
+                    pass
+
+                else:
+
+                    files.append(name)
+
+                    pass
+
+                pass
+
+            directories = sorted(directories)
+            files       = sorted(files)
+
+            for name in directories:
+
+                node: FSNode = self._children[name]
+
+                data.extend(node._to_string_lines(indent))
+
+                pass
+
+            for name in files:
+
+                node: FSNode = self._children[name]
+
+                data.extend(node._to_string_lines(indent))
+
+                pass
+
+            pass
+
+        return data
+
+    @staticmethod
+    def _key(node):
+
+        return f"{node.name}"
+
+    pass
+
+
+class FastDFS:
+
+    class Error(Exception):
+
+        pass
+
+    def __init__(self, host, port, db):
+
+        pool = redis.ConnectionPool(host=host, port=port, db=db)
+
+        self._redis = redis.Redis(connection_pool=pool)
+
+        tracker = get_tracker_conf("/work/starearth_tiler/conf/fastdfs.conf")
+
+        self._client = Fdfs_client(tracker)
+
+        pass
+
+    def put(self, file_local: str, file_remote: str, metadata=None):
+
+        """
+
+        将本地文件存储至远程
+
+        :param file_local:  本地文件名(可包含路径)
+        :param file_remote: 远程文件名(可包含路径)
+        :param metadata:    可选附带元数据(字典对象)
+
+        """
+
+        # Check the local file
+
+        FastDFS._check_local_file(file_local)
+
+        # Make sure the target path/file name is well-formed
+
+        FastDFS._verify_remote_name(file_remote)
+
+        # Split into directory list and file name
+
+        directories, name = FastDFS._split(file_remote)
+
+        if name is None:
+
+            name = os.path.basename(file_local)
+
+            pass
+
+        if metadata is None:
+
+            metadata = {}
+
+            pass
+
+        # Always upload as a new file
+
+        info = self._upload(file_local)
+
+        info     = json.dumps(info,     ensure_ascii=False)
+        metadata = json.dumps(metadata, ensure_ascii=False)
+
+        key = self._key(directories, name)
+
+        # If an old file index exists, delete that file first
+
+        value = self._redis.get(key)
+
+        if value:
+
+            value = value.decode()
+
+            file_id = json.loads(value.split("$")[0])["id"].encode()
+
+            self._client.delete_file(file_id)
+
+            pass
+
+        # Record (overwrite) the new file index
+
+        value = f"{info}${metadata}"
+
+        self._redis.set(key, value)
+
+        pass
+
+    def get(self, file_remote: str, file_local: str):
+
+        """
+
+        将远程文件获取至本地
+
+        :param file_remote: 远程文件名(可包含路径)
+        :param file_local:  本地文件名(可包含路径)
+
+        :return: 文件附带的元数据(字典对象)
+
+        """
+
+        # Make sure the target path/file name is well-formed
+
+        FastDFS._verify_local_name(file_remote)
+
+        # Split into directory list and file name
+
+        directories, name = FastDFS._split(file_remote)
+
+        if name is None:
+
+            raise FastDFS.Error(f"{file_remote} must be a file")
+
+        # Look up the file index
+
+        key = self._key(directories, name)
+
+        value = self._redis.get(key)
+
+        if not value:
+
+            raise FastDFS.Error(f"{file_remote} not exist")
+
+        # Make sure the target path/file name is well-formed
+
+        FastDFS._verify_local_name(file_local)
+
+        # Create any missing local directories if needed
+
+        FastDFS._ensure_path_for_local_file(file_local)
+
+        # Download the file
+
+        value = value.decode().split("$")
+
+        info     = json.loads(value[0])
+        metadata = json.loads(value[1])
+
+        file_id = info["id"].encode()
+
+        self._client.download_to_file(file_local, file_id)
+
+        return metadata
+
+    def tree(self):
+
+        root = FSNode(FSNode.Type.Directory, "/")
+
+        cursor = 0
+
+        while True:
+
+            cursor, records = self._redis.scan(cursor, f"{FastDFS._redis_prefix}*")
+
+            for data in records:
+
+                node = root
+
+                data = data.decode()[14:].split(":")  # 14 == len(FastDFS._redis_prefix) + 1, to strip the trailing colon too
+
+                directories = data[:-1]
+                name        = data[-1]
+
+                for directory in directories:
+
+                    if directory not in node:
+
+                        node = node.add(FSNode(FSNode.Type.Directory, directory))
+
+                        pass
+
+                    else:
+
+                        node = node.get(directory)
+
+                        pass
+
+                    pass
+
+                node.add(FSNode(FSNode.Type.File, name))
+
+                pass
+
+            if cursor == 0:
+
+                break
+                pass
+
+            pass
+
+        return root
+
+    _redis_prefix = "starearth:dfs"
+
+    @staticmethod
+    def _key(directories, name):
+
+        return f"{FastDFS._redis_prefix}:{':'.join(directories)}:{name}" if directories else name
+
+    @staticmethod
+    def _check_local_file(file: str):
+
+        # The file must exist
+
+        if not os.path.exists(file):
+
+            raise FastDFS.Error(f"{file} not exist")
+
+        # It must not be a directory
+
+        if os.path.isdir(file):
+
+            raise FastDFS.Error(f"{file} must be a file")
+
+        pass
+
+    @staticmethod
+    def _verify_local_name(file: str):
+
+        # todo: validate that the local path/file name is legal; may involve multiple platforms
+        # raise
+        pass
+
+    @staticmethod
+    def _verify_remote_name(file: str):
+
+        # todo: validate that the remote path/file name is legal; only UNIX-style paths are used
+        # raise
+        pass
+
+    @staticmethod
+    def _ensure_path_for_local_file(file: str):
+
+        # todo: create the directories in file_local if they do not exist
+        # raise
+        pass
+
+    @staticmethod
+    def _split(file: str):
+        """
+
+        将文件字符串分解为目录数组和文件名
+
+        :param file: 文件字符串
+
+        :return: 目录数组,文件名
+
+        """
+
+        # Normalize the leading slash
+
+        if not file.startswith("/"):
+
+            file = f"/{file}"
+
+            pass
+
+        array = file.split("/")[1:]
+
+        directories = array[:-1]
+        name        = None if array[-1] == "" else array[-1]
+
+        return directories, name
+
+    def _upload(self, file: str):
+
+        try:
+
+            result = self._client.upload_by_filename(file)
+
+            info = \
+                {
+                    "group": result["Group name"    ].decode(),
+                    "id":    result["Remote file_id"].decode(),
+                    "size":  result["Uploaded size" ]
+                }
+
+            return info
+
+        except Exception as error:
+
+            raise FastDFS.Error(f"{file} uploading error: {error}")
+
+        pass
+
+    pass
+
+
+class StorageFDFS:
+
+    # todo
+    #  FastDFS can already handle generic file operations;
+    #  on top of that, reuse the file-walking logic written earlier
+    #  to push the data under the tile output directory into FastDFS.
+    #  If config values need to be passed in, adjust the FastDFS constructor yourself.
+    #  Note: the Redis used by FastDFS is not the same one used for task management.
+
+    # Example
+
+    """
+        try:
+
+            fs = FastDFS()
+
+            fs.put("./aaa/bbb/default/xyz.ptp", "/some/dir/xyz1.ptp", { "可以改名": "可以不改名" })
+            fs.put("./aaa/bbb/default/xyz2.ptp", "/some/dir/", { "可以只使用路径,自动使用原名": "但是后面必须跟斜杠,不然会被认为是文件名" })
+
+            metadata = fs.get("some/dir/xyz3.ptp", "./xyz2.ptp", { "同名文件": "会被覆盖" })
+
+            print(metadata)  # { "同名文件": "会被覆盖" }
+
+            # 后期
+
+            tree = fs.tree()
+
+            print(tree)  # 输出整个目录结构
+
+            # 使用 for in 遍历
+
+            for node in tree.children.items():
+
+                pass
+
+            pass
+
+        except Exception as error:
+
+            print(error)
+
+            pass
+    """
+
+    pass
+

+ 299 - 0
src/starearth/storage/mbtiles.py

@@ -0,0 +1,299 @@
+import json
+import os
+import hashlib
+import shutil
+import sqlite3
+from io import BytesIO
+import mercantile
+from multiprocessing import Process
+from PIL import Image
+
+
+
+def mbtiles_merge(output_path, storage_dic, tileset, package_name, package_path_list):
+
+    # Walk package_path_list: merge when the target package exists, otherwise copy it over
+    for package_path in package_path_list:
+        date = package_path.split('/')[-1]
+        # Merge the package
+        if os.path.exists(os.path.join(output_path, date, package_name + '.mbtiles')):
+            con1 = sqlite_connect(os.path.join(package_path, package_name + '.mbtiles'))
+            con2 = sqlite_connect(os.path.join(output_path, date, package_name + '.mbtiles'))
+            if not con1:
+                raise Exception('database connection failed: %s' % os.path.join(package_path, package_name + '.mbtiles'))
+            if not con2:
+                raise Exception('database connection failed: %s' % os.path.join(output_path, date, package_name + '.mbtiles'))
+
+            cur1 = con1.cursor()
+            cur2 = con2.cursor()
+            optimize_connection(cur1)
+            optimize_connection(cur2)
+
+            cur1.execute("""SELECT * FROM map;""")
+            tiles = cur1.fetchall()
+
+            for index, tile in enumerate(tiles):
+                index += 1
+                z, x, y = tile[0], tile[1], tile[2]
+
+                # Write the tile
+                cur1.execute(
+                    """SELECT tile_data FROM tiles WHERE zoom_level=%s AND tile_column=%s AND tile_row=%s;""" % (
+                        z, x, y))
+                result1 = cur1.fetchone()
+                if result1:
+                    tile_content1 = result1[0]
+                else:
+                    continue
+
+                cur2.execute(
+                    """SELECT tile_data FROM tiles WHERE zoom_level=%s AND tile_column=%s AND tile_row=%s;""" % (
+                        z, x, y))
+                result2 = cur2.fetchone()
+                if result2:
+                    tile_content2 = result2[0]
+                    # PIL: composite the new tile over the old one through the alpha mask
+                    img1 = Image.open(BytesIO(tile_content1)).convert("RGBA")
+                    img2 = Image.open(BytesIO(tile_content2)).convert("RGBA")
+                    mask = img1.getchannel('A')
+                    img = Image.composite(img1, img2, mask)
+
+                    # tobytes() would dump raw pixels; the blob must be an encoded image (PNG assumed here)
+                    buffer = BytesIO()
+                    img.save(buffer, format='PNG')
+                    tile_content = buffer.getvalue()
+
+                    cur2.execute(
+                        """SELECT tile_id FROM map WHERE zoom_level=? AND tile_column=? AND tile_row=?""",
+                        (z, x, y))
+                    tile_id = cur2.fetchone()[0]
+
+                    new_tile_id = hashlib.md5(tile_content).hexdigest()  # hexdigest never contains spaces, so no post-processing is needed
+                    # update the images table
+                    # print(z, x, y, tile_id)
+                    cur2.execute("""UPDATE OR IGNORE images SET tile_data=?,tile_id=? WHERE tile_id=?;""",
+                                 (sqlite3.Binary(tile_content), new_tile_id, tile_id))
+                    # update the map table
+                    cur2.execute(
+                        """UPDATE OR IGNORE map SET tile_id=? WHERE tile_id=?;""", (new_tile_id, tile_id))
+                else:
+                    # no existing tile, insert directly
+                    tile_content = tile_content1
+                    tile_id = tile[3]
+                    cur2.execute(
+                        """INSERT OR IGNORE INTO images (tile_data, tile_id) VALUES (?, ?);""",
+                        (sqlite3.Binary(tile_content), tile_id))
+                    cur2.execute(
+                        """INSERT OR IGNORE INTO map (zoom_level, tile_column, tile_row, tile_id) VALUES (?,?,?,?);""",
+                        (z, x, y, tile_id))
+
+            con2.commit()
+            optimize_database(con2)
+            con1.close()
+            con2.close()
+
+        else:
+
+            if not os.path.exists(os.path.join(output_path, date)):
+                # Create the path: agreed output root + date
+                os.makedirs(os.path.join(output_path, date), exist_ok=True)
+
+            shutil.move(os.path.join(package_path, package_name + '.mbtiles'),
+                        os.path.join(output_path, date, package_name + '.mbtiles'))
+
+
+def optimize_connection(cur):
+    cur.execute("""PRAGMA synchronous=0""")
+    cur.execute("""PRAGMA locking_mode=EXCLUSIVE""")
+    cur.execute("""PRAGMA journal_mode=DELETE""")
+
+
+def flip_y(zoom, y):
+    return (2 ** zoom - 1) - y
+
+
+def sqlite_connect(package_file):
+    try:
+        return sqlite3.connect(package_file)
+    except Exception as e:
+        print(e)
+        return None
+
+
+def optimize_database(con):
+    # Takes a connection (not a cursor): VACUUM needs autocommit, toggled via isolation_level
+    con.execute("""ANALYZE;""")
+
+    con.isolation_level = None
+    con.execute("""VACUUM;""")
+    con.isolation_level = ''
+
+
+def mbtiles_setup(cur):
+    # Create tables
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS map (
+           zoom_level INTEGER,
+           tile_column INTEGER,
+           tile_row INTEGER,
+           tile_id TEXT
+        );
+        """
+    )
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS images (
+            tile_data BLOB,
+            tile_id TEXT
+        );
+        """
+    )
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS metadata (
+            name TEXT,
+            value TEXT
+        );
+        """
+    )
+
+    # Create indexes
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS map_index ON map (zoom_level, tile_column, tile_row);
+        """
+    )
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS images_id ON images (tile_id);
+        """
+    )
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS name ON metadata (name);
+        """
+    )
+
+    # Create the tiles view
+    cur.execute(
+        """
+        CREATE VIEW IF NOT EXISTS tiles AS
+            SELECT
+                map.zoom_level AS zoom_level,
+                map.tile_column AS tile_column,
+                map.tile_row AS tile_row,
+                images.tile_data AS tile_data
+            FROM map JOIN images ON images.tile_id = map.tile_id;
+        """
+    )
+
+
+class StorageMbtiles:
+    def __init__(self, package_path, tiles_path):
+
+        self.package_path = package_path  # temporary directory for the package files
+        self.tiles_path = tiles_path      # temporary directory for the tiles
+
+    def calc_package_name(self, x, y, z, base_list):
+
+        for base in base_list:
+
+            minz, maxz, basez = base[0], base[1], base[2]
+
+            if minz <= z <= maxz:
+                return "{minz}-{maxz}-{basez}-{basex}-{basey}".format(minz=minz, maxz=maxz, basez=basez,
+                                                                      basex=x // 2 ** (z - basez),
+                                                                      basey=y // 2 ** (z - basez))
+        raise Exception('zoom out of range base_list.')
+
+    def save(self, package_file_dir, tile_dir, tile_list, package_rule, tile_format):
+
+        for x, y, z in tile_list:  # tile coordinates
+            package_file_name = self.calc_package_name(x, y, z, package_rule)
+            min_z, max_z, base_z, pack_x, pack_y = map(lambda _x: int(_x), package_file_name.split('-'))
+
+            package_file_path = str(os.path.join(str(package_file_dir), package_file_name + '.mbtiles'))
+
+            tile_file_name = '{}_{}_{}.{}'.format(z, y, x, tile_format)
+            tile_file = os.path.join(tile_dir, 'tiles', tile_file_name)
+
+            con = sqlite_connect(package_file_path)
+            if not con:
+                raise Exception('Database connection failed: %s' % package_file_path)
+            cur = con.cursor()
+            optimize_connection(cur)
+            mbtiles_setup(cur)
+
+            with open(tile_file, 'rb') as f:
+                tile_content = f.read()
+
+            # Derive tile_id from the content; identical tiles (e.g. fully transparent or fully white) collapse into a single images row
+            tile_id = hashlib.md5(tile_content).hexdigest()
+            y = flip_y(z, y)
+
+            # Write into images
+            cur.execute(
+                """INSERT OR IGNORE INTO images (tile_data, tile_id) VALUES (?, ?); """,
+                (sqlite3.Binary(tile_content), tile_id))
+
+            # Write into map
+            cur.execute(
+                """INSERT OR IGNORE INTO map (zoom_level, tile_column, tile_row, tile_id) VALUES (?, ?, ?, ?);""",
+                (z, x, y, tile_id))
+
+            # Write metadata
+            bounds = mercantile.bounds(pack_x, pack_y, base_z)
+
+            metadata = {
+                "name": package_file_name,
+                "format": tile_format,
+                "bounds": ",".join([str(i) for i in bounds]),
+                "center": "%s,%s,%s" % (
+                    str((bounds[0] + bounds[2]) / 2.0), str((bounds[1] + bounds[3]) / 2.0), str(min_z)),
+                "minzoom": str(min_z),
+                "maxzoom": str(max_z),
+                "attribution": '',
+                "description": '',
+                "type": 'baselayer',
+                "version": '1',
+            }
+
+            for name, value in metadata.items():
+                cur.execute('REPLACE INTO metadata (name,value) VALUES (?,?)', (name, value))
+
+            con.commit()
+            optimize_database(con)
+            con.close()
+
+        if os.path.exists(os.path.join(tile_dir, 'imagesheet.geojsonl')):
+            shutil.move(os.path.join(tile_dir, 'imagesheet.geojsonl'),
+                        os.path.join(package_file_dir, 'imagesheet.geojsonl'))
+
+        if os.path.exists(os.path.join(tile_dir, 'meta.json')):
+            shutil.move(os.path.join(tile_dir, 'meta.json'), os.path.join(package_file_dir, 'meta.json'))
+
+    def storage(self, package_rule, tile_format):
+
+        p_list = []
+
+        for date in os.listdir(str(self.tiles_path)):
+
+            _tile_dir = os.path.join(str(self.tiles_path), date)
+            _package_file_dir = os.path.join(self.package_path, date)
+
+            if not os.path.exists(_package_file_dir):
+                os.makedirs(_package_file_dir, exist_ok=True)
+
+            with open(os.path.join(_tile_dir, 'tiles_list.json'), 'r') as f:
+                _tile_list = json.loads(f.read())
+                p_list.append(Process(target=self.save,
+                                      args=(_package_file_dir, _tile_dir, _tile_list, package_rule, tile_format)))
+        for p in p_list:
+            p.start()
+
+        for p in p_list:
+            p.join()
+
+        for p in p_list:
+            if p.exitcode != 0:
+                raise Exception('Multiprocess packaging failed.')
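
A note on the map/images split used above: it is a content-addressed store. Tile bytes are hashed into a tile_id, identical blobs (for example all-transparent or all-white tiles) collapse onto one images row, and map keeps one address row per (z, x, y). A minimal self-contained sketch, not part of this commit, illustrating the deduplication:

import hashlib
import sqlite3

con = sqlite3.connect(':memory:')
cur = con.cursor()
cur.execute("CREATE TABLE map (zoom_level INTEGER, tile_column INTEGER, tile_row INTEGER, tile_id TEXT)")
cur.execute("CREATE TABLE images (tile_data BLOB, tile_id TEXT)")
cur.execute("CREATE UNIQUE INDEX images_id ON images (tile_id)")
cur.execute("CREATE VIEW tiles AS SELECT map.zoom_level, map.tile_column, map.tile_row, images.tile_data "
            "FROM map JOIN images ON images.tile_id = map.tile_id")

blank = b'\x89PNG...fake blank tile'  # two addresses, identical content
for z, x, y in [(3, 1, 2), (3, 1, 3)]:
    tile_id = hashlib.md5(blank).hexdigest()
    cur.execute("INSERT OR IGNORE INTO images (tile_data, tile_id) VALUES (?, ?)",
                (sqlite3.Binary(blank), tile_id))
    cur.execute("INSERT OR IGNORE INTO map (zoom_level, tile_column, tile_row, tile_id) VALUES (?, ?, ?, ?)",
                (z, x, y, tile_id))

print(cur.execute("SELECT COUNT(*) FROM images").fetchone())  # (1,) - the blob is stored once
print(cur.execute("SELECT COUNT(*) FROM tiles").fetchone())   # (2,) - both addresses resolve through the view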

+ 303 - 0
src/starearth/storage/mongo.py

@@ -0,0 +1,303 @@
+import io
+import os
+import json
+import time
+import datetime
+from PIL import Image
+from pymongo import MongoClient
+from mongoengine import (
+    connect,
+    IntField,
+    StringField,
+    FloatField,
+    DateTimeField,
+    ListField,
+    DictField,
+    EmbeddedDocumentListField,
+    EmbeddedDocument,
+    Document,
+    BinaryField,
+)
+
+
+class TileSchema(Document):
+    """
+    tile schema - 存储瓦片,表名由`tiles_` 和`tileset_id`组合
+    """
+    zoom_level  = IntField()    # zoom
+    tile_column = IntField()    # x
+    tile_row    = IntField()    # y
+    tile_date   = IntField()    # acquisition date as an integer unix timestamp (whole seconds); 0 (1970-01-01 UTC) when the data carries no date
+    tile_data   = BinaryField() # binary tile data
+
+
+class MongoTileStorageEmbDoc(EmbeddedDocument):
+    """
+    MongoTileStorage, 作为 Embedded Document 嵌入到SimpleTileSetEmbDoc中
+    """
+    storage_type = StringField(required=True, default="MongoDB_Doc")
+    path         = StringField()
+
+
+class SimpleTileSetEmbDoc(EmbeddedDocument):
+    """
+    tileset简易对象, 作为 Embedded Document 嵌入到TileSetSchema中
+    """
+    min_zoom     = IntField(default=0)
+    max_zoom     = IntField(default=22)
+    bounds       = ListField(FloatField(required=True), default=[-180, -90, 180, 90])  # bbox; the default belongs on the list, not the float field
+    center       = ListField(FloatField(required=True))  # e.g. [76.275329586789, 139.153492567373, 10]
+    tile_storage = EmbeddedDocumentListField(MongoTileStorageEmbDoc)
+
+
+class TileSetSchema(Document):
+    """
+    tileset schema: 多时相瓦片集文档对象
+    """
+    tile_set_id = StringField(unique=True)
+    owner       = StringField()
+    tilejson    = StringField(default="2.1.x")
+    tile_grid   = StringField(default="")
+    name        = StringField(required=True)
+    description = StringField()
+    version     = StringField(default="1.0.0")
+    attribution = StringField()
+    scheme      = StringField(required=True, default="xyz")
+    tiles       = ListField(StringField(required=True), default=[])  # array of tile URLs through which tiles can be fetched, e.g. "http://mongo_tile_server/{tileset_id}/{z}/{x}/{y}.jpg"
+    min_zoom    = IntField(default=0)
+    max_zoom    = IntField(default=25)
+    bounds      = ListField(FloatField(required=True))  # bbox, [-180, -90, 180, 90]
+    center      = ListField(FloatField(required=True))  # e.g. [76.275329586789, 139.153492567373, 10]
+    # multi-temporal support
+    msmt_info = DictField()  # keyed by date string, valued by the simplified tileset object SimpleTileSetEmbDoc
+    date_modified = DateTimeField(default=datetime.datetime.utcnow)
+
+    meta = \
+        {
+            'indexes': ['tile_set_id'],      # index
+            'ordering': ['-date_modified'],  # '-' sorts newest first
+            "collection": 'tile_set_schema',
+        }
+
+
+class StorageMongo:
+    def __init__(self, db_host, db_port, db_name, tileset, tiles_path, min_zoom, max_zoom, merging):
+        self.host    = db_host
+        self.port    = db_port
+        self.db_name = db_name
+        self.merging = merging
+
+        self.tileset_id          = tileset.get('id', '')
+        self.tileset_name        = tileset.get('name', '')
+        self.tileset_owner       = tileset.get('owner', '')
+        self.tileset_tile_grid   = tileset.get('tile_grid', '')
+        self.tileset_attribution = tileset.get('attribution', '')
+        self.tileset_description = tileset.get('description', '')
+
+        self.tiles_path = tiles_path
+        self.min_zoom   = min_zoom
+        self.max_zoom   = max_zoom
+
+        db_client       = MongoClient(self.host, self.port)
+        self.db_client  = db_client[self.db_name]
+
+        self.db_engine_client = connect(
+            db=self.db_name,
+            host=self.host,
+            port=self.port,
+            alias="default",
+            authentication_source='admin'
+        )
+
+    def storage(self):
+
+        msmt_info = {}
+
+        # collection (table) name
+        tile_collection_name = 'tiles_{}'.format(self.tileset_id)
+
+        # tiles
+        current_collection_db = self.db_client[tile_collection_name]
+        tileset_bbox = []
+
+        abs_tiles_path = self.tiles_path
+
+        # Scan the tile directory for its date subdirectories
+        for tile_date in os.listdir(abs_tiles_path):
+
+            date_dir_path = os.path.join(abs_tiles_path, tile_date)
+            tiles_dir_path = os.path.join(date_dir_path, 'tiles')
+
+            # Skip entries that are not directories
+            if not os.path.isdir(date_dir_path):
+
+                continue
+
+            # Read the bbox from meta.json
+            meta_json_path = os.path.join(date_dir_path, 'meta.json')
+            with open(meta_json_path, 'r') as fb:
+                data = json.load(fb)
+                bbox = data['bbox']
+
+            if tileset_bbox == []:
+                tileset_bbox = bbox
+            else:
+                # Grow the tileset bbox to cover this date's bbox
+                _min_lon = min(tileset_bbox[0], bbox[0])
+                _min_lat = min(tileset_bbox[1], bbox[1])
+                _max_lon = max(tileset_bbox[2], bbox[2])
+                _max_lat = max(tileset_bbox[3], bbox[3])
+
+                tileset_bbox = [_min_lon, _min_lat, _max_lon, _max_lat]
+
+            # Corresponds to the temporal field of the tileset info
+            ts = MongoTileStorageEmbDoc(path="mongodb://{host}:{port}/{db}/{tile_set_id}/{tile_date}".format(
+                host=self.host,
+                port=self.port,
+                db=self.db_name,
+                tile_set_id=self.tileset_id,
+                tile_date=tile_date
+            ))
+            a_tile_set = SimpleTileSetEmbDoc(tile_storage=[ts])  # EmbeddedDocumentListField expects a list
+
+            msmt_info.update({
+                tile_date: a_tile_set
+            })
+
+            for tile in os.listdir(tiles_dir_path):
+                name, ext = os.path.splitext(tile)
+
+                # Parse z, y, x from the tile file name ('{z}_{y}_{x}.{ext}')
+                z, y, x = name.split('_')
+                tile_path = os.path.join(tiles_dir_path, tile)
+
+                # Convert the tile into binary (PIL re-encode, kept for reference):
+                # byte_image = io.BytesIO()
+                # img = Image.open(tile_path)
+                # img.save(byte_image, format=ext)
+                # byte_img = byte_image.getvalue()
+
+                with open(tile_path, 'rb') as fb:
+                    data_byte = fb.read()
+
+                # If the tile directory name is a date string, convert it to an integer timestamp
+                try:
+                    # Whole-second unix timestamp; 0 (1970-01-01 UTC) when no date is provided
+                    tmp_date = time.strptime(tile_date, "%Y-%m-%d")
+                    time_stamp = int(time.mktime(tmp_date))
+
+                except Exception:
+                    time_stamp = 0
+
+                # tile schema document
+                tile_schema = {
+                    'zoom_level': int(z),
+                    'tile_column': int(x),
+                    'tile_row': int(y),
+                    'tile_date': time_stamp,
+                    'tile_data': data_byte
+                }
+
+                tile_one = current_collection_db.find_one({'zoom_level': int(z), 'tile_column': int(x), 'tile_row': int(y), 'tile_date': time_stamp, })
+
+                if tile_one is None:
+                    # Not stored yet: insert into MongoDB
+                    current_collection_db.insert_one(tile_schema)
+
+                    continue
+
+                tile_data_byte_mongo = tile_one.get('tile_data')
+
+                if self.merging == 0:
+                    # Keep the existing tile only
+                    continue
+
+                elif self.merging == 1:
+                    # Replace the existing tile
+                    tile_byte = data_byte
+
+                elif self.merging == 2:
+                    # Merge same-name tiles (existing tile on top)
+                    new_img = Image.open(io.BytesIO(data_byte))
+                    origin_img = Image.open(io.BytesIO(tile_data_byte_mongo))
+
+                    new_img.paste(origin_img, (0, 0), origin_img)
+
+                    image_arr_byte = io.BytesIO()
+                    new_img.save(image_arr_byte, "PNG")
+
+                    tile_byte = image_arr_byte.getvalue()
+
+                elif self.merging == 3:
+                    # Merge same-name tiles (existing tile underneath)
+                    new_img = Image.open(io.BytesIO(data_byte))
+                    origin_img = Image.open(io.BytesIO(tile_data_byte_mongo))
+
+                    origin_img.paste(new_img, (0, 0), new_img)
+
+                    image_arr_byte = io.BytesIO()
+                    origin_img.save(image_arr_byte, "PNG")
+
+                    tile_byte = image_arr_byte.getvalue()
+
+                current_collection_db.find_one_and_update(
+                    {
+                        'zoom_level': int(z),
+                        'tile_column': int(x),
+                        'tile_row': int(y),
+                        'tile_date': time_stamp,
+                    },
+                    {
+                        "$set": {
+                            'tile_data': tile_byte,
+                        }
+                    },
+                    upsert=True
+                )
+
+        tileset_center = [
+            (tileset_bbox[0] + tileset_bbox[2]) / 2,
+            (tileset_bbox[1] + tileset_bbox[3]) / 2,
+            self.min_zoom
+        ]
+
+        try:
+            # Save the tileset info into MongoDB
+            tile_set = TileSetSchema(
+                tile_set_id=self.tileset_id,
+                owner=self.tileset_owner,
+                tilejson='2.1.x',
+                tile_grid=self.tileset_tile_grid,
+                name=self.tileset_name,
+                description=self.tileset_description,
+                version='1.0.0',
+                attribution=self.tileset_attribution,
+                scheme='xyz',
+                min_zoom=self.min_zoom,
+                max_zoom=self.max_zoom,
+                bounds=tileset_bbox,
+                center=tileset_center,
+                msmt_info=msmt_info,
+            )
+
+            tile_set.save()
+
+        except Exception:
+            # tile_set_id carries a unique index, so a duplicate tileset id is silently ignored
+            pass
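
For reference, a hedged sketch of reading one tile back out of the per-tileset collection written by StorageMongo.storage(); the host, port, database name and tileset id here are placeholders, not values from this repository:

import time
from pymongo import MongoClient

client = MongoClient('localhost', 27017)        # assumed connection details
collection = client['starearth']['tiles_demo']  # follows the 'tiles_{tileset_id}' naming above

# same timestamp rule as in storage(): whole-second unix time of the date directory
date_stamp = int(time.mktime(time.strptime('2023-06-01', '%Y-%m-%d')))
doc = collection.find_one({'zoom_level': 12, 'tile_column': 3300,
                           'tile_row': 1500, 'tile_date': date_stamp})
if doc is not None:
    with open('tile.png', 'wb') as f:  # tile_data holds the raw encoded image bytes
        f.write(doc['tile_data'])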

+ 87 - 0
src/starearth/storage/osm_zxy.py

@@ -0,0 +1,87 @@
+import os
+import json
+from PIL import Image
+from tilecloud import TileStore, Tile, TileCoord
+
+
+class StorageOSMZXY:
+    def __init__(self, tiles_path, min_zoom, max_zoom, merging, output_path, tile_format):
+        self.tiles_path = tiles_path
+        self.min_zoom = min_zoom
+        self.max_zoom = max_zoom
+        self.merging = merging
+        self.output_path = output_path
+        self.tile_format = tile_format
+
+    def storage(self):
+        # Process the tiles under the slicing directory
+
+        date_dir_path = os.path.join(self.tiles_path, 'tiles')
+        tile_list_path = os.path.join(self.tiles_path, 'tiles_list.json')
+
+        # Load the tile list
+        with open(tile_list_path, 'r') as fb:
+            tile_list = json.load(fb)
+
+        # Build the source and destination tile path templates
+        local_tiles_src = "file://{}/%(z)d_%(y)d_%(x)d.{}".format(date_dir_path, self.tile_format)
+        local_tiles_dst = "file://{}/%(z)d/%(x)d/%(y)d.{}".format(self.output_path, self.tile_format)
+
+        ts_input = TileStore.load(local_tiles_src)
+        ts_output = TileStore.load(local_tiles_dst)
+
+        # Build Tile objects from the tile list
+        tiles = [Tile(TileCoord(z, x, y)) for x, y, z in tile_list]
+
+        for tile in tiles:
+            tile = ts_input.get_one(tile)
+
+            if tile is None:
+                continue
+
+            if tile in ts_output.get_all():
+                if self.merging == 0:
+                    # Keep the existing tile only
+                    continue
+
+                elif self.merging == 1:
+                    # Replace the existing tile
+                    ts_output.put_one(tile)
+
+                elif self.merging == 2:
+                    # Merge same-name tiles (existing tile on top)
+
+                    z, x, y = [int(i) for i in str(tile.tilecoord).split('/')]
+
+                    origin_tile_path = local_tiles_dst.replace('file://', '') % {'z': z, 'x': x, 'y': y}
+                    new_tile_path = local_tiles_src.replace('file://', '') % {'z': z, 'x': x, 'y': y}
+
+                    origin_img = Image.open(origin_tile_path)
+                    new_img = Image.open(new_tile_path)
+
+                    new_img.paste(origin_img, (0, 0), origin_img)
+
+                    new_img.save(origin_tile_path)
+
+                elif self.merging == 3:
+                    # Merge same-name tiles (existing tile underneath)
+
+                    z, x, y = [int(i) for i in str(tile.tilecoord).split('/')]
+
+                    origin_tile_path = local_tiles_dst.replace('file://', '') % {'z': z, 'x': x, 'y': y}
+                    new_tile_path = local_tiles_src.replace('file://', '') % {'z': z, 'x': x, 'y': y}
+
+                    origin_img = Image.open(origin_tile_path)
+                    new_img = Image.open(new_tile_path)
+
+                    origin_img.paste(new_img, (0, 0), new_img)
+
+                    origin_img.save(origin_tile_path)
+
+            else:
+                # Store the tile
+                ts_output.put_one(tile)
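
Merging modes 2 and 3 above both rely on PIL's paste() with the RGBA image itself as the mask: opaque pixels of the pasted tile win, while transparent pixels let the tile underneath show through. A stand-alone sketch with synthetic tiles (it assumes RGBA input, as the code above does):

from PIL import Image

origin = Image.new('RGBA', (256, 256), (255, 0, 0, 255))  # existing tile: solid red
new = Image.new('RGBA', (256, 256), (0, 0, 0, 0))         # new tile: transparent...
new.paste((0, 0, 255, 255), (0, 0, 128, 256))             # ...except an opaque blue left half

merged = origin.copy()
merged.paste(new, (0, 0), new)    # merging == 3: existing tile underneath
print(merged.getpixel((0, 0)))    # (0, 0, 255, 255): the opaque blue half covers red
print(merged.getpixel((200, 0)))  # (255, 0, 0, 255): red shows through the transparent half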

+ 242 - 0
src/starearth/storage/ptp.py

@@ -0,0 +1,242 @@
+import io
+import json
+import os
+import shutil
+import traceback
+from multiprocessing import Process
+
+from PIL import Image
+from map_lib.mtp.mtp_writer import MtpWriter
+from map_lib.mtp.mtp_reader import MtpReader
+from map_lib.storage.file import FileStorage
+from map_lib.mtp.mtp_modifier import MtpModifier
+from map_lib.mtp.mtp_batch import MtpBatch
+from starearth.utils import private_key, dev_key
+
+
+def create_ptp(ptp_path, map_name='satellite', map_id='r_google', tile_ext='jpg', tile_size=256, mtp_id='PTP'):
+    """创建ptp包"""
+
+    fs = FileStorage()
+    fs.create(ptp_path)
+    tile_ranges = {}
+    ptp_name = os.path.basename(ptp_path).split('.')[0]
+    min_zoom_level, max_zoom_level, base_zoom_level, base_x, base_y = ptp_name.split('-')
+
+    min_zoom_level, max_zoom_level, base_zoom_level, base_x, base_y = \
+        int(min_zoom_level), int(max_zoom_level), int(base_zoom_level), int(base_x), int(base_y)
+
+    for zoom_level in range(min_zoom_level, max_zoom_level + 1):
+        start_x = pow(2, zoom_level - base_zoom_level) * base_x
+        start_y = pow(2, zoom_level - base_zoom_level) * base_y
+
+        tile_ranges[zoom_level] = [start_x, start_x + pow(2, zoom_level - base_zoom_level) - 1,
+                                   start_y, start_y + pow(2, zoom_level - base_zoom_level) - 1]
+
+    ranges = {str(k): tile_ranges[k] for k in sorted(tile_ranges.keys())}
+
+    metadata = {
+        'map_name': map_name,
+        'packet_name': ptp_name,
+        'map_id': map_id,
+        'min_zoom_level': min_zoom_level,
+        'max_zoom_level': max_zoom_level,
+        'tile_file_ext': tile_ext,
+        'bounds': (-180, -90, 180, 90)
+    }
+
+    mtp_writer = MtpWriter(fs,           # FileStorage handle
+                           private_key,  # private key
+                           dev_key,      # device key
+                           metadata,     # mtp package metadata
+                           ranges,       # tile level ranges; immutable after creation
+                           tile_size,    # tile size, 256 x 256 in this instance
+                           mtp_id)       # mtp package ID, e.g. PTP or DTP
+    return mtp_writer
+
+
+class StoragePTP:
+    def __init__(self, ptp_path, tiles_path):
+
+        self.ptp_path = ptp_path      # temporary directory for the ptp packages
+        self.tiles_path = tiles_path  # temporary directory for the tiles
+
+    def get_ptp_name(self, x, y, z, base_list):
+
+        for base in base_list:
+            minz, maxz, basez = base[0], base[1], base[2]
+            if minz <= z <= maxz:
+                return "{minz}-{maxz}-{basez}-{basex}-{basey}".format(minz=minz, maxz=maxz, basez=basez,
+                                                                      basex=x // 2 ** (z - basez),
+                                                                      basey=y // 2 ** (z - basez))
+        raise Exception('zoom out of range base_list.')
+
+    def save(self, package_file_dir, tile_dir, tile_list, package_rule, tile_format, tile_size):
+        """瓦片存储到ptp文件中"""
+
+        tiles_path = os.path.join(tile_dir, 'tiles')
+        for x, y, z in tile_list:  # tile coordinates
+            package_file_name = self.get_ptp_name(x, y, z, package_rule)
+            package_file_path = str(os.path.join(str(package_file_dir), package_file_name + '.ptp'))
+            tile_file_name = '{}_{}_{}.{}'.format(z, y, x, tile_format)
+
+            tile_file = os.path.join(tiles_path, tile_file_name)
+
+            try:
+                with open(tile_file, 'rb') as f:
+                    tile_content = f.read()
+            except Exception as err:
+                traceback.print_exc()
+
+                continue
+
+            if os.path.exists(package_file_path):
+                fs = FileStorage()
+                fs.open(package_file_path, "rb+")
+                mtp_writer = MtpModifier(fs, private_key, dev_key)
+
+            else:
+                if tile_format == 'jpeg':
+
+                    mtp_writer = create_ptp(package_file_path)
+
+                elif tile_format == 'png':
+
+                    mtp_writer = create_ptp(package_file_path, tile_ext='png', tile_size=tile_size)
+
+                elif tile_format == 'bil':
+
+                    mtp_writer = create_ptp(ptp_path=package_file_path, map_name='dem', map_id='r_google',
+                                                 tile_ext='bil', tile_size=128, mtp_id='PTP')
+
+                else:
+
+                    mtp_writer = create_ptp(package_file_path)
+
+            mtp_writer.add_tile_with_data(tile_content, z, x, y)
+
+            mtp_writer.close()
+
+        if os.path.exists(os.path.join(tile_dir, 'imagesheet.geojsonl')):
+            shutil.move(os.path.join(tile_dir, 'imagesheet.geojsonl'),
+                        os.path.join(package_file_dir, 'imagesheet.geojsonl'))
+
+        if os.path.exists(os.path.join(tile_dir, 'meta.json')):
+
+            shutil.move(os.path.join(tile_dir, 'meta.json'), os.path.join(package_file_dir, 'meta.json'))
+
+    def storage(self, package_rule, tile_format, tile_size):
+
+        p_list = []
+
+        for date in os.listdir(str(self.tiles_path)):
+
+            _tile_dir = os.path.join(str(self.tiles_path), date)
+            _package_file_dir = os.path.join(self.ptp_path, date)
+
+            if not os.path.exists(_package_file_dir):
+                os.makedirs(_package_file_dir, exist_ok=True)
+
+            with open(os.path.join(_tile_dir, 'tiles_list.json'), 'r') as f:
+
+                _tile_list = json.loads(f.read())
+                p_list.append(Process(target=self.save,
+                                      args=(_package_file_dir, _tile_dir, _tile_list, package_rule, tile_format, tile_size)))
+        for p in p_list:
+            p.start()
+
+        for p in p_list:
+            p.join()
+
+        for p in p_list:
+            if p.exitcode != 0:
+                raise Exception('Multiprocess packaging failed.')
+
+
+def ptp_merge(output_path, storage_dic, tileset, package_name, package_path_list):
+    # Iterate over package_path_list: merge when the target package already exists, otherwise copy
+    for package_path in package_path_list:
+        date = package_path.split('/')[-1]
+
+        outpath_ptp = os.path.join(output_path, date, package_name + '.ptp')
+        new_ptp = os.path.join(package_path, package_name + '.ptp')
+        # Merge packages
+        if os.path.exists(outpath_ptp):
+
+            # MtpBatch.merge cannot handle partially covered tiles, so it is not used:
+            # MtpBatch.merge(os.path.join(package_path, package_name + '.ptp'),
+            #                os.path.join(output_path, date, package_name + '.ptp'), private_key, dev_key)
+
+            if not os.path.exists(new_ptp):
+                continue
+
+            output_ptp_bak = outpath_ptp + ".bak"
+            shutil.move(outpath_ptp, output_ptp_bak)
+
+            # The merge below follows the old slice implementation
+            fs1 = FileStorage()
+            fs1.open(output_ptp_bak)
+            mtp_reader1 = MtpReader(fs1, private_key, dev_key)
+
+            fs2 = FileStorage()
+            fs2.open(new_ptp)
+            mtp_reader2 = MtpReader(fs2, private_key, dev_key)
+
+            metadata = mtp_reader1.metadata
+            map_name = metadata.get('map_name')
+            map_id = metadata.get('map_id')
+            tile_ext = metadata.get('tile_file_ext')
+
+            mtp_writer = create_ptp(outpath_ptp, map_name=map_name, map_id=map_id, tile_ext=tile_ext)
+
+            for level, tile_range in mtp_reader2.tile_ranges.items():
+                minx, maxx, miny, maxy, _ = tile_range
+                for x in range(minx, maxx + 1):
+                    for y in range(miny, maxy + 1):
+
+                        buff1 = mtp_reader1.get_tile(level, x, y)
+                        buff2 = mtp_reader2.get_tile(level, x, y)
+
+                        if not buff1 and not buff2:
+                            continue
+
+                        if not buff1 and buff2:
+                            mtp_writer.add_tile_with_data(buff2, level, x, y)
+
+                        if not buff2 and buff1:
+                            mtp_writer.add_tile_with_data(buff1, level, x, y)
+
+                        if buff2 and buff1:
+                            # Both packages contain this tile: composite new over old
+                            img1 = Image.open(io.BytesIO(buff1))
+                            img2 = Image.open(io.BytesIO(buff2))
+
+                            img1.paste(img2, (0, 0), img2)
+
+                            output_io = io.BytesIO()
+                            img1.save(output_io, format=img1.format)
+
+                            mtp_writer.add_tile_with_data(output_io.getvalue(), level, x, y)
+
+                            del img1
+                            del img2
+
+            del mtp_reader1
+            del mtp_reader2
+            del mtp_writer
+
+            if os.path.exists(output_ptp_bak):
+                os.remove(output_ptp_bak)
+
+        else:
+
+            if not os.path.exists(os.path.join(output_path, date)):
+                # Create the path: agreed output root + date
+                os.makedirs(os.path.join(output_path, date), exist_ok=True)
+
+            shutil.move(os.path.join(package_path, package_name + '.ptp'), os.path.join(output_path, date, package_name + '.ptp'))

+ 305 - 0
src/starearth/storage/smtiles.py

@@ -0,0 +1,305 @@
+import datetime
+import hashlib
+import json
+import os
+import shutil
+import sqlite3
+import mercantile
+from io import BytesIO
+from multiprocessing import Process
+from PIL import Image
+
+
+def optimize_connection(cur):
+    cur.execute("""PRAGMA synchronous=0""")
+    cur.execute("""PRAGMA locking_mode=EXCLUSIVE""")
+    cur.execute("""PRAGMA journal_mode=DELETE""")
+
+
+def sqlite_connect(package_file):
+    try:
+        return sqlite3.connect(package_file)
+    except Exception as e:
+        print(e)
+        return None
+
+
+def optimize_database(con):
+    # Takes a connection (not a cursor): VACUUM needs autocommit, toggled via isolation_level
+    con.execute("""ANALYZE;""")
+
+    con.isolation_level = None
+    con.execute("""VACUUM;""")
+    con.isolation_level = ''
+
+
+def smtiles_setup(cur):
+    # Create tables
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS map (
+           zoom_level INTEGER,
+           tile_column INTEGER,
+           tile_row INTEGER,
+           tile_id TEXT,
+           resolution TEXT,
+           create_time TEXT 
+        );
+        """
+    )
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS images (
+            tile_data BLOB,
+            tile_id TEXT
+        );
+        """
+    )
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS metadata (
+            name TEXT,
+            value TEXT
+
+        );
+        """
+    )
+
+    # Create indexes
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS map_index ON map (zoom_level, tile_column, tile_row, resolution);
+        """
+    )
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS images_id ON images (tile_id);
+        """
+    )
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS name ON metadata (name);
+        """
+    )
+
+    # Create the tiles view
+    cur.execute(
+        """
+        CREATE VIEW IF NOT EXISTS tiles AS
+            SELECT
+                map.zoom_level AS zoom_level,
+                map.tile_column AS tile_column,
+                map.tile_row AS tile_row,
+                map.resolution AS resolution, 
+                images.tile_data AS tile_data
+            FROM map JOIN images ON images.tile_id = map.tile_id;
+        """
+    )
+
+
+class StorageSmtiles:
+    def __init__(self, package_path, tiles_path):
+        self.package_path = package_path  # temporary directory for the package files
+        self.tiles_path = tiles_path      # temporary directory for the tiles
+
+    def calc_package_name(self, x, y, z, base_list):
+        for base in base_list:
+            minz, maxz, basez = base[0], base[1], base[2]
+            if minz <= z <= maxz:
+                return "{minz}-{maxz}-{basez}-{basex}-{basey}".format(minz=minz, maxz=maxz, basez=basez,
+                                                                      basex=x // 2 ** (z - basez),
+                                                                      basey=y // 2 ** (z - basez))
+        raise Exception('zoom out of range base_list.')
+
+    def save(self, package_file_dir, tile_dir, tile_list, package_rule, tile_format, param):
+
+        for x, y, z in tile_list:  # tile coordinates
+            package_file_name = self.calc_package_name(x, y, z, package_rule)
+            min_z, max_z, base_z, pack_x, pack_y = map(lambda _x: int(_x), package_file_name.split('-'))
+
+            package_file_path = str(os.path.join(str(package_file_dir), package_file_name + '.smtiles'))
+
+            tile_file_name = '{}_{}_{}.{}'.format(z, y, x, tile_format)
+            tile_file = os.path.join(tile_dir, 'tiles', tile_file_name)
+
+            con = sqlite_connect(package_file_path)
+            if not con:
+                raise Exception('Database connection failed: %s' % package_file_path)
+            cur = con.cursor()
+            optimize_connection(cur)
+
+            smtiles_setup(cur)
+
+            with open(tile_file, 'rb') as f:
+                tile_content = f.read()
+            # Derive tile_id from the content; identical tiles (e.g. fully transparent or fully white) collapse into a single images row
+            tile_id = hashlib.md5(tile_content).hexdigest()
+
+            # Write into images
+            cur.execute(
+                """INSERT OR IGNORE INTO images (tile_data, tile_id) VALUES (?, ?); """,
+                (sqlite3.Binary(tile_content), tile_id))
+
+            # Write into map
+            cur.execute(
+                """INSERT OR IGNORE INTO map (zoom_level, tile_column, tile_row, tile_id, create_time) VALUES (?, ?, ?, ?, ?);""",
+                (z, x, y, tile_id, datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
+
+            # Write metadata
+            bounds = mercantile.bounds(pack_x, pack_y, base_z)
+
+            metadata = {
+                'name': package_file_name,
+                'type': 'baselayer',
+                'version': '1.1',
+                'description': param.get('description', ''),
+                'format': tile_format,
+                'bounds': ",".join([str(i) for i in bounds]),
+                'ext_spec_version': '201310',
+                'axis_origin': '-180.0,90.0' if param.get(
+                    'tile_grid') == 'WGS1984Quad' else '-20037508.3427892,20037508.3427892',
+                # lat/lon grid: -180.0,90.0; Mercator grid: -20037508.3427892,20037508.3427892
+                'axis_positive_direction': 'RightDown',
+                'crs_wkid': str(param.get('epsg')),
+                'crs_wkt': str(param.get('wkt')),
+                'tile_height': str(param.get('tile_size')),
+                'tile_width': str(param.get('tile_size')),
+                'resolutions': str(param.get('resolutions')),
+                'scales': '',
+                'transparent': 'false',
+                'mapStatusHashCode': '',
+                'map_parameter': '',
+                'compatible': 'false' if param.get('tile_grid') == 'WGS1984Quad' else 'true'
+                # compatible: false for lat/lon, true for Mercator
+            }
+
+            for name, value in metadata.items():
+                cur.execute('REPLACE INTO metadata (name,value) VALUES (?,?)', (name, value))
+
+            con.commit()
+            optimize_database(con)
+            con.close()
+
+        if os.path.exists(os.path.join(tile_dir, 'imagesheet.geojsonl')):
+            shutil.move(os.path.join(tile_dir, 'imagesheet.geojsonl'),
+                        os.path.join(package_file_dir, 'imagesheet.geojsonl'))
+
+        if os.path.exists(os.path.join(tile_dir, 'meta.json')):
+            shutil.move(os.path.join(tile_dir, 'meta.json'), os.path.join(package_file_dir, 'meta.json'))
+
+    def storage(self, package_rule, tile_format, param):
+
+        p_list = []
+
+        for date in os.listdir(str(self.tiles_path)):
+
+            _tile_dir = os.path.join(str(self.tiles_path), date)
+            _package_file_dir = os.path.join(self.package_path, date)
+
+            if not os.path.exists(_package_file_dir):
+                os.makedirs(_package_file_dir, exist_ok=True)
+
+            with open(os.path.join(_tile_dir, 'tiles_list.json'), 'r') as f:
+                _tile_list = json.loads(f.read())
+                p_list.append(Process(target=self.save,
+                                      args=(_package_file_dir, _tile_dir, _tile_list, package_rule, tile_format, param)))
+
+        for p in p_list:
+            p.start()
+
+        for p in p_list:
+            p.join()
+
+        for p in p_list:
+            if p.exitcode != 0:
+                raise Exception('Multiprocess packaging failed.')
+
+
+def smtiles_merge(output_path, storage_dic, tileset, package_name, package_path_list):
+
+    # Iterate over package_path_list: merge when the target package already exists, otherwise copy
+    for package_path in package_path_list:
+        date = package_path.split('/')[-1]
+        # Merge packages
+        if os.path.exists(os.path.join(output_path, date, package_name + '.smtiles')):
+            con1 = sqlite_connect(os.path.join(package_path, package_name + '.smtiles'))
+            con2 = sqlite_connect(os.path.join(output_path, date, package_name + '.smtiles'))
+            if not con1:
+                raise Exception('Database connection failed: %s' % os.path.join(package_path, package_name + '.smtiles'))
+            if not con2:
+                raise Exception('Database connection failed: %s' % os.path.join(output_path, date, package_name + '.smtiles'))
+
+            cur1 = con1.cursor()
+            cur2 = con2.cursor()
+            optimize_connection(cur1)
+            optimize_connection(cur2)
+
+            cur1.execute("""SELECT * FROM map;""")
+            tiles = cur1.fetchall()
+
+            for tile in tiles:
+                z, x, y = tile[0], tile[1], tile[2]
+
+                # Fetch the tile from the source package
+                cur1.execute(
+                    """SELECT tile_data FROM tiles WHERE zoom_level=%s AND tile_column=%s AND tile_row=%s;""" % (
+                        z, x, y))
+                result1 = cur1.fetchone()
+                if result1:
+                    tile_content1 = result1[0]
+                else:
+                    continue
+
+                cur2.execute(
+                    """SELECT tile_data FROM tiles WHERE zoom_level=%s AND tile_column=%s AND tile_row=%s;""" % (
+                        z, x, y))
+                result2 = cur2.fetchone()
+                if result2:
+                    tile_content2 = result2[0]
+                    # Composite with PIL: the source tile's alpha decides which pixels win
+                    img1 = Image.open(BytesIO(tile_content1)).convert("RGBA")
+                    img2 = Image.open(BytesIO(tile_content2)).convert("RGBA")
+                    mask = img1.getchannel('A')
+                    img = Image.composite(img1, img2, mask)
+                    buffer = BytesIO()
+                    img.save(buffer, format='PNG')  # re-encode; tobytes() would dump raw pixels, not an image file
+                    tile_content = buffer.getvalue()
+
+                    cur2.execute(
+                        """SELECT tile_id FROM map WHERE zoom_level=? AND tile_column=? AND tile_row=?""",
+                        (z, x, y))
+                    tile_id = cur2.fetchone()[0]
+
+                    new_tile_id = hashlib.md5(tile_content).hexdigest()
+                    # Update images
+                    cur2.execute("""UPDATE OR IGNORE images SET tile_data=?,tile_id=? WHERE tile_id=?;""",
+                                 (sqlite3.Binary(tile_content), new_tile_id, tile_id))
+                    # Update map
+                    cur2.execute(
+                        """UPDATE OR IGNORE map SET tile_id=? WHERE tile_id=?;""", (new_tile_id, tile_id))
+                else:
+                    # No tile at this address yet: insert directly
+                    tile_content = tile_content1
+                    tile_id = tile[3]
+                    cur2.execute(
+                        """INSERT OR IGNORE INTO images (tile_data, tile_id) VALUES (?, ?);""",
+                        (sqlite3.Binary(tile_content), tile_id))
+                    cur2.execute(
+                        """INSERT OR IGNORE INTO map (zoom_level, tile_column, tile_row, tile_id) VALUES (?,?,?,?);""",
+                        (z, x, y, tile_id))
+
+            con2.commit()
+            optimize_database(con2)
+            con1.close()
+            con2.close()
+
+        else:
+
+            if not os.path.exists(os.path.join(output_path, date)):
+                # Create the path: agreed output root + date
+                os.makedirs(os.path.join(output_path, date), exist_ok=True)
+
+            shutil.move(os.path.join(package_path, package_name + '.smtiles'),
+                        os.path.join(output_path, date, package_name + '.smtiles'))

+ 41 - 0
src/starearth/storage/storage.py

@@ -0,0 +1,41 @@
+from starearth.task.storage_mongo import TaskStorageMongo
+from starearth.task.ptp_package import TaskStoragePTP
+
+
+class StorageFactory(object):
+    """工厂类,选择创建哪种打包方式"""
+
+    def __init__(self):
+        self._creators = {
+            'StarEarth_PTP':   TaskStoragePTP,
+            'StarEarth_Mongo': TaskStorageMongo,
+        }
+
+    _instance = None
+
+    @staticmethod
+    def _singleton():
+        if StorageFactory._instance is None:
+            StorageFactory._instance = StorageFactory()
+
+        return StorageFactory._instance
+
+    @staticmethod
+    def create(name, *args):
+        instance = StorageFactory._singleton()
+
+        if name not in instance._creators:
+            raise Exception('{} is invalid '.format(name))
+        creator = instance._creators[name]
+
+        if creator is None:
+            raise Exception('unsupported object type')
+        return creator(*args)
+
+    @staticmethod
+    def register(name, creator):
+        instance = StorageFactory._singleton()
+        instance._creators[name] = creator

+ 305 - 0
src/starearth/storage/svtiles.py

@@ -0,0 +1,305 @@
+import datetime
+import hashlib
+import json
+import os
+import shutil
+import sqlite3
+import mercantile
+from io import BytesIO
+from multiprocessing import Process
+from PIL import Image
+
+
+def optimize_connection(cur):
+    cur.execute("""PRAGMA synchronous=0""")
+    cur.execute("""PRAGMA locking_mode=EXCLUSIVE""")
+    cur.execute("""PRAGMA journal_mode=DELETE""")
+
+
+def sqlite_connect(package_file):
+    try:
+        return sqlite3.connect(package_file)
+    except Exception as e:
+        print(e)
+        return None
+
+
+def optimize_database(con):
+    # Takes a connection (not a cursor): VACUUM needs autocommit, toggled via isolation_level
+    con.execute("""ANALYZE;""")
+
+    con.isolation_level = None
+    con.execute("""VACUUM;""")
+    con.isolation_level = ''
+
+
+def svtiles_setup(cur):
+    # Create tables
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS map (
+           zoom_level INTEGER,
+           tile_column INTEGER,
+           tile_row INTEGER,
+           tile_id TEXT,
+           resolution TEXT,
+           create_time TEXT 
+        );
+        """
+    )
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS images (
+            tile_data BLOB,
+            tile_id TEXT
+        );
+        """
+    )
+    cur.execute(
+        """
+        CREATE TABLE IF NOT EXISTS metadata (
+            name TEXT,
+            value TEXT
+
+        );
+        """
+    )
+
+    # Create indexes
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS map_index ON map (zoom_level, tile_column, tile_row, resolution);
+        """
+    )
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS images_id ON images (tile_id);
+        """
+    )
+    cur.execute(
+        """
+        CREATE UNIQUE INDEX IF NOT EXISTS name ON metadata (name);
+        """
+    )
+
+    # Create the tiles view
+    cur.execute(
+        """
+        CREATE VIEW IF NOT EXISTS tiles AS
+            SELECT
+                map.zoom_level AS zoom_level,
+                map.tile_column AS tile_column,
+                map.tile_row AS tile_row,
+                map.resolution AS resolution, 
+                images.tile_data AS tile_data
+            FROM map JOIN images ON images.tile_id = map.tile_id;
+        """
+    )
+
+
+class StorageSvtiles:
+    def __init__(self, package_path, tiles_path):
+        self.package_path = package_path  # temporary directory for the package files
+        self.tiles_path = tiles_path      # temporary directory for the tiles
+
+    def calc_package_name(self, x, y, z, base_list):
+        for base in base_list:
+            minz, maxz, basez = base[0], base[1], base[2]
+            if minz <= z <= maxz:
+                return "{minz}-{maxz}-{basez}-{basex}-{basey}".format(minz=minz, maxz=maxz, basez=basez,
+                                                                      basex=x // 2 ** (z - basez),
+                                                                      basey=y // 2 ** (z - basez))
+        raise Exception('zoom out of range base_list.')
+
+    def save(self, package_file_dir, tile_dir, tile_list, package_rule, tile_format, param):
+
+        for x, y, z in tile_list:  # tile coordinates
+            package_file_name = self.calc_package_name(x, y, z, package_rule)
+            min_z, max_z, base_z, pack_x, pack_y = map(lambda _x: int(_x), package_file_name.split('-'))
+
+            package_file_path = str(os.path.join(str(package_file_dir), package_file_name + '.svtiles'))
+
+            tile_file_name = '{}_{}_{}.{}'.format(z, y, x, tile_format)
+            tile_file = os.path.join(tile_dir, 'tiles', tile_file_name)
+
+            con = sqlite_connect(package_file_path)
+            if not con:
+                raise Exception('Database connection failed: %s' % package_file_path)
+            cur = con.cursor()
+            optimize_connection(cur)
+
+            svtiles_setup(cur)
+
+            with open(tile_file, 'rb') as f:
+                tile_content = f.read()
+            # Derive tile_id from the content; identical tiles (e.g. fully transparent or fully white) collapse into a single images row
+            tile_id = hashlib.md5(tile_content).hexdigest()
+
+            # Write into images
+            cur.execute(
+                """INSERT OR IGNORE INTO images (tile_data, tile_id) VALUES (?, ?); """,
+                (sqlite3.Binary(tile_content), tile_id))
+
+            # Write into map
+            cur.execute(
+                """INSERT OR IGNORE INTO map (zoom_level, tile_column, tile_row, tile_id, create_time) VALUES (?, ?, ?, ?, ?);""",
+                (z, x, y, tile_id, datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
+
+            # Write metadata
+            bounds = mercantile.bounds(pack_x, pack_y, base_z)
+
+            metadata = {
+                'name': package_file_name,
+                'type': 'baselayer',
+                'version': '1.1',
+                'description': param.get('description', ''),
+                'format': tile_format,
+                'bounds': ",".join([str(i) for i in bounds]),
+                'ext_spec_version': '201310',
+                'axis_origin': '-180.0,90.0' if param.get(
+                    'tile_grid') == 'WGS1984Quad' else '-20037508.3427892,20037508.3427892',
+                # lat/lon grid: -180.0,90.0; Mercator grid: -20037508.3427892,20037508.3427892
+                'axis_positive_direction': 'RightDown',
+                'crs_wkid': str(param.get('epsg')),
+                'crs_wkt': str(param.get('wkt')),
+                'tile_height': str(param.get('tile_size')),
+                'tile_width': str(param.get('tile_size')),
+                'resolutions': str(param.get('resolutions')),
+                'scales': '',
+                'transparent': 'false',
+                'mapStatusHashCode': '',
+                'map_parameter': '',
+                'compatible': 'false' if param.get('tile_grid') == 'WGS1984Quad' else 'true'
+                # compatible: false for lat/lon, true for Mercator
+            }
+
+            for name, value in metadata.items():
+                cur.execute('REPLACE INTO metadata (name,value) VALUES (?,?)', (name, value))
+
+            con.commit()
+            optimize_database(con)
+            con.close()
+
+        if os.path.exists(os.path.join(tile_dir, 'imagesheet.geojsonl')):
+            shutil.move(os.path.join(tile_dir, 'imagesheet.geojsonl'),
+                        os.path.join(package_file_dir, 'imagesheet.geojsonl'))
+
+        if os.path.exists(os.path.join(tile_dir, 'meta.json')):
+            shutil.move(os.path.join(tile_dir, 'meta.json'), os.path.join(package_file_dir, 'meta.json'))
+
+    def storage(self, package_rule, tile_format, param):
+
+        p_list = []
+
+        for date in os.listdir(str(self.tiles_path)):
+
+            _tile_dir = os.path.join(str(self.tiles_path), date)
+            _package_file_dir = os.path.join(self.package_path, date)
+
+            if not os.path.exists(_package_file_dir):
+                os.makedirs(_package_file_dir, exist_ok=True)
+
+            with open(os.path.join(_tile_dir, 'tiles_list.json'), 'r') as f:
+                _tile_list = json.loads(f.read())
+                p_list.append(Process(target=self.save,
+                                      args=(_package_file_dir, _tile_dir, _tile_list, package_rule, tile_format, param)))
+
+        for p in p_list:
+            p.start()
+
+        for p in p_list:
+            p.join()
+
+        for p in p_list:
+            if p.exitcode != 0:
+                raise Exception('Multiprocess packaging failed.')
+
+
+def svtiles_merge(output_path, storage_dic, tileset, package_name, package_path_list):
+
+    # Iterate over package_path_list: merge when the target package already exists, otherwise copy
+    for package_path in package_path_list:
+        date = package_path.split('/')[-1]
+        # Merge packages
+        if os.path.exists(os.path.join(output_path, date, package_name + '.svtiles')):
+            con1 = sqlite_connect(os.path.join(package_path, package_name + '.svtiles'))
+            con2 = sqlite_connect(os.path.join(output_path, date, package_name + '.svtiles'))
+            if not con1:
+                raise Exception('Database connection failed: %s' % os.path.join(package_path, package_name + '.svtiles'))
+            if not con2:
+                raise Exception('Database connection failed: %s' % os.path.join(output_path, date, package_name + '.svtiles'))
+
+            cur1 = con1.cursor()
+            cur2 = con2.cursor()
+            optimize_connection(cur1)
+            optimize_connection(cur2)
+
+            cur1.execute("""SELECT * FROM map;""")
+            tiles = cur1.fetchall()
+
+            for tile in tiles:
+                z, x, y = tile[0], tile[1], tile[2]
+
+                # Fetch the tile from the source package
+                cur1.execute(
+                    """SELECT tile_data FROM tiles WHERE zoom_level=%s AND tile_column=%s AND tile_row=%s;""" % (
+                        z, x, y))
+                result1 = cur1.fetchone()
+                if result1:
+                    tile_content1 = result1[0]
+                else:
+                    continue
+
+                cur2.execute(
+                    """SELECT tile_data FROM tiles WHERE zoom_level=%s AND tile_column=%s AND tile_row=%s;""" % (
+                        z, x, y))
+                result2 = cur2.fetchone()
+                if result2:
+                    tile_content2 = result2[0]
+                    # Composite with PIL: the source tile's alpha decides which pixels win
+                    img1 = Image.open(BytesIO(tile_content1)).convert("RGBA")
+                    img2 = Image.open(BytesIO(tile_content2)).convert("RGBA")
+                    mask = img1.getchannel('A')
+                    img = Image.composite(img1, img2, mask)
+                    buffer = BytesIO()
+                    img.save(buffer, format='PNG')  # re-encode; tobytes() would dump raw pixels, not an image file
+                    tile_content = buffer.getvalue()
+
+                    cur2.execute(
+                        """SELECT tile_id FROM map WHERE zoom_level=? AND tile_column=? AND tile_row=?""",
+                        (z, x, y))
+                    tile_id = cur2.fetchone()[0]
+
+                    new_tile_id = hashlib.md5(tile_content).hexdigest()
+                    # Update images
+                    cur2.execute("""UPDATE OR IGNORE images SET tile_data=?,tile_id=? WHERE tile_id=?;""",
+                                 (sqlite3.Binary(tile_content), new_tile_id, tile_id))
+                    # Update map
+                    cur2.execute(
+                        """UPDATE OR IGNORE map SET tile_id=? WHERE tile_id=?;""", (new_tile_id, tile_id))
+                else:
+                    # No tile at this address yet: insert directly
+                    tile_content = tile_content1
+                    tile_id = tile[3]
+                    cur2.execute(
+                        """INSERT OR IGNORE INTO images (tile_data, tile_id) VALUES (?, ?);""",
+                        (sqlite3.Binary(tile_content), tile_id))
+                    cur2.execute(
+                        """INSERT OR IGNORE INTO map (zoom_level, tile_column, tile_row, tile_id) VALUES (?,?,?,?);""",
+                        (z, x, y, tile_id))
+
+            con2.commit()
+            optimize_database(con2)
+            con1.close()
+            con2.close()
+
+        else:
+
+            if not os.path.exists(os.path.join(output_path, date)):
+                # Create the path: agreed output root + date
+                os.makedirs(os.path.join(output_path, date), exist_ok=True)
+
+            shutil.move(os.path.join(package_path, package_name + '.svtiles'),
+                        os.path.join(output_path, date, package_name + '.svtiles'))

+ 2 - 0
src/starearth/utils/__init__.py

@@ -0,0 +1,2 @@
+private_key = "AR50ATgaXgVAZDJJaV8tJA1mYzQ1UVdoVDFlJXdkI2dxbzYyN2VmZGdEI2M0eWVXdElwVFhVWlZYcGhlIWZnRkdkcUBkI3dkLC5iZmIhZWZ3UXZjZSNmdyRlZCR3JWVkd09g"
+dev_key = "06175b1ee8c089db707b8f537d612460"

+ 24 - 0
src/starearth/utils/general_utils.py

@@ -0,0 +1,24 @@
+import datetime
+import time
+import pytz
+
+
+def cnNow():
+    """Current local time as 'YYYY-MM-DD HH:MM:SS'"""
+    # tz = pytz.timezone('Asia/Shanghai')
+    # return tz.fromutc(datetime.datetime.now() + datetime.timedelta(0, time.altzone))
+    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+
+
+def cnNowPtp():
+    """Current local time as 'YYYYMMDDHHMMSS'"""
+    # tz = pytz.timezone('Asia/Shanghai')
+    # return tz.fromutc(datetime.datetime.now() + datetime.timedelta(0, time.altzone))
+    return time.strftime("%Y%m%d%H%M%S", time.localtime())
+
+
+def print_log(msg):
+    """Print a log line with a timestamp prefix"""
+    print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + ' | ' + msg)
+
+

+ 279 - 0
src/starearth/utils/mer_geo.py

@@ -0,0 +1,279 @@
+import math
+from osgeo import osr
+
+MAX_ZOOM_LEVEL = 32
+
+
+class GlobalMercator(object):
+    r"""
+    TMS Global Mercator Profile
+    ---------------------------
+
+    Functions necessary for generation of tiles in Spherical Mercator projection,
+    EPSG:3857.
+
+    Such tiles are compatible with Google Maps, Bing Maps, Yahoo Maps,
+    UK Ordnance Survey OpenSpace API, ...
+    and you can overlay them on top of base maps of those web mapping applications.
+
+    Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left).
+
+    What coordinate conversions do we need for TMS Global Mercator tiles::
+
+         LatLon      <->       Meters      <->     Pixels    <->       Tile
+
+     WGS84 coordinates   Spherical Mercator  Pixels in pyramid  Tiles in pyramid
+         lat/lon            XY in meters     XY pixels Z zoom      XYZ from TMS
+        EPSG:4326           EPSG:3857
+         .----.              ---------               --                TMS
+        /      \     <->     |       |     <->     /----/    <->      Google
+        \      /             |       |           /--------/          QuadTree
+         -----               ---------         /------------/
+       KML, public         WebMapService         Web Clients      TileMapService
+
+    What is the coordinate extent of Earth in EPSG:3857?
+
+      [-20037508.342789244, -20037508.342789244, 20037508.342789244, 20037508.342789244]
+      Constant 20037508.342789244 comes from the circumference of the Earth in meters
+      (about 40 thousand kilometers); the coordinate origin is in the middle of the extent.
+      In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0
+      $ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:3857
+      Polar areas with abs(latitude) bigger than 85.05112878 are clipped off.
+
+    What are zoom level constants (pixels/meter) for pyramid with EPSG:3857?
+
+      the whole region at the top of the pyramid (zoom=0) is covered by one 256x256 pixel tile;
+      the resolution of every subsequent zoom level is divided by two
+      initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062
+
+    What is the difference between TMS and Google Maps/QuadTree tile name convention?
+
+      The tile raster itself is the same (equal extent, projection, pixel size),
+      there is just different identification of the same raster tile.
+      Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ.
+      Google placed the origin [0,0] to the top-left corner, reference is XYZ.
+      Microsoft is referencing tiles by a QuadTree name, defined on the website:
+      http://msdn2.microsoft.com/en-us/library/bb259689.aspx
+
+    The lat/lon coordinates are using WGS84 datum, yes?
+
+      Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum.
+      Well, the web clients like Google Maps are projecting those coordinates by
+      Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if
+      they were on the WGS84 ellipsoid.
+
+      From MSDN documentation:
+      To simplify the calculations, we use the spherical form of projection, not
+      the ellipsoidal form. Since the projection is used only for map display,
+      and not for displaying numeric coordinates, we don't need the extra precision
+      of an ellipsoidal projection. The spherical projection causes approximately
+      0.33 percent scale distortion in the Y direction, which is not visually
+      noticeable.
+
+    How do I create a raster in EPSG:3857 and convert coordinates with PROJ.4?
+
+      You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform.
+      All of these tools support -t_srs 'epsg:3857'.
+
+      For other GIS programs check the exact definition of the projection:
+      More info at http://spatialreference.org/ref/user/google-projection/
+      The same projection is designated as EPSG:3857. WKT definition is in the
+      official EPSG database.
+
+      Proj4 Text:
+        +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0
+        +k=1.0 +units=m +nadgrids=@null +no_defs
+
+      Human readable WKT format of EPSG:3857:
+         PROJCS["Google Maps Global Mercator",
+             GEOGCS["WGS 84",
+                 DATUM["WGS_1984",
+                     SPHEROID["WGS 84",6378137,298.257223563,
+                         AUTHORITY["EPSG","7030"]],
+                     AUTHORITY["EPSG","6326"]],
+                 PRIMEM["Greenwich",0],
+                 UNIT["degree",0.0174532925199433],
+                 AUTHORITY["EPSG","4326"]],
+             PROJECTION["Mercator_1SP"],
+             PARAMETER["central_meridian",0],
+             PARAMETER["scale_factor",1],
+             PARAMETER["false_easting",0],
+             PARAMETER["false_northing",0],
+             UNIT["metre",1,
+                 AUTHORITY["EPSG","9001"]]]
+    """
+
+    def __init__(self, tile_size=256):
+        """Initialize the TMS Global Mercator pyramid"""
+        self.tile_size = tile_size
+        self.initialResolution = 2 * math.pi * 6378137 / self.tile_size
+        # 156543.03392804062 for tile_size 256 pixels
+        self.originShift = 2 * math.pi * 6378137 / 2.0
+        # 20037508.342789244
+
+    def LatLonToMeters(self, lat, lon):
+        """Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:3857"""
+
+        mx = lon * self.originShift / 180.0
+        my = math.log(math.tan((90 + lat) * math.pi / 360.0)) / (math.pi / 180.0)
+
+        my = my * self.originShift / 180.0
+        return mx, my
+
+    def MetersToLatLon(self, mx, my):
+        """Converts XY point from Spherical Mercator EPSG:3857 to lat/lon in WGS84 Datum"""
+
+        lon = (mx / self.originShift) * 180.0
+        lat = (my / self.originShift) * 180.0
+
+        lat = 180 / math.pi * (2 * math.atan(math.exp(lat * math.pi / 180.0)) - math.pi / 2.0)
+        return lat, lon
+
+    def PixelsToMeters(self, px, py, zoom):
+        """Converts pixel coordinates in given zoom level of pyramid to EPSG:3857"""
+
+        res = self.Resolution(zoom)
+        mx = px * res - self.originShift
+        my = py * res - self.originShift
+        return mx, my
+
+    def MetersToPixels(self, mx, my, zoom):
+        """Converts EPSG:3857 to pyramid pixel coordinates in given zoom level"""
+
+        res = self.Resolution(zoom)
+        px = (mx + self.originShift) / res
+        py = (my + self.originShift) / res
+        return px, py
+
+    def PixelsToTile(self, px, py):
+        """Returns a tile covering region in given pixel coordinates"""
+
+        tx = int(math.ceil(px / float(self.tile_size)) - 1)
+        ty = int(math.ceil(py / float(self.tile_size)) - 1)
+        return tx, ty
+
+    def PixelsToRaster(self, px, py, zoom):
+        """Move the origin of pixel coordinates to top-left corner"""
+
+        mapSize = self.tile_size << zoom
+        return px, mapSize - py
+
+    def MetersToTile(self, mx, my, zoom):
+        """Returns tile for given mercator coordinates"""
+
+        px, py = self.MetersToPixels(mx, my, zoom)
+        return self.PixelsToTile(px, py)
+
+    def TileBounds(self, tx, ty, zoom):
+        """Returns bounds of the given tile in EPSG:3857 coordinates"""
+
+        minx, miny = self.PixelsToMeters(tx * self.tile_size, ty * self.tile_size, zoom)
+        maxx, maxy = self.PixelsToMeters((tx + 1) * self.tile_size, (ty + 1) * self.tile_size, zoom)
+        return minx, miny, maxx, maxy
+
+    def TileLatLonBounds(self, tx, ty, zoom):
+        """Returns bounds of the given tile in latitude/longitude using WGS84 datum"""
+
+        bounds = self.TileBounds(tx, ty, zoom)
+        minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1])
+        maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3])
+
+        return minLat, minLon, maxLat, maxLon
+
+    def Resolution(self, zoom):
+        """Resolution (meters/pixel) for given zoom level (measured at Equator)"""
+
+        # return (2 * math.pi * 6378137) / (self.tile_size * 2**zoom)
+        return self.initialResolution / (2 ** zoom)
+
+    def ZoomForPixelSize(self, pixelSize):
+        """Maximal scaledown zoom of the pyramid closest to the pixelSize."""
+
+        for i in range(MAX_ZOOM_LEVEL):
+            if pixelSize > self.Resolution(i):
+                return max(0, i - 1)  # We don't want to scale up
+        return MAX_ZOOM_LEVEL - 1
+
+    def GoogleTile(self, tx, ty, zoom):
+        """Converts TMS tile coordinates to Google Tile coordinates"""
+
+        # coordinate origin is moved from bottom-left to top-left corner of the extent
+        return tx, (2 ** zoom - 1) - ty
+
+    def QuadTree(self, tx, ty, zoom):
+        """Converts TMS tile coordinates to Microsoft QuadTree"""
+
+        quadKey = ""
+        ty = (2 ** zoom - 1) - ty
+        for i in range(zoom, 0, -1):
+            digit = 0
+            mask = 1 << (i - 1)
+            if (tx & mask) != 0:
+                digit += 1
+            if (ty & mask) != 0:
+                digit += 2
+            quadKey += str(digit)
+
+        return quadKey
+
+
+class NewGlobalMercator(GlobalMercator):
+    """TMS Global Mercator pyramid whose pixel-Y origin can be top-left (XYZ) or bottom-left (TMS)."""
+
+    def __init__(self, origin='top-left', tile_size=256):
+        super(NewGlobalMercator, self).__init__(tile_size=tile_size)
+        if origin not in ('top-left', 'bottom-left'):
+            raise ValueError("Only 'top-left' and 'bottom-left' origins are supported.")
+        self.origin = origin
+
+    def PixelsToMeters(self, px, py, zoom):
+        """Converts pixel coordinates in given zoom level of pyramid to EPSG:3857"""
+        res = self.Resolution(zoom)
+        mx = px * res - self.originShift
+        if self.origin == 'top-left':
+            my = self.originShift - py * res
+        elif self.origin == 'bottom-left':
+            my = py * res - self.originShift
+        else:
+            return
+        return mx, my
+
+    def MetersToPixels(self, mx, my, zoom):
+        """Converts EPSG:3857 to pyramid pixel coordinates in given zoom level"""
+        res = self.Resolution(zoom)
+        px = (mx + self.originShift) / res
+        if self.origin == 'top-left':
+            py = (self.originShift - my) / res
+        elif self.origin == 'bottom-left':
+            py = (my + self.originShift) / res
+        else:
+            return
+        return px, py
+
+    def TileBounds(self, tx, ty, zoom):
+        if self.origin == 'top-left':
+            minx, miny = self.PixelsToMeters(tx * self.tile_size, (ty + 1) * self.tile_size, zoom)
+            maxx, maxy = self.PixelsToMeters((tx + 1) * self.tile_size, ty * self.tile_size, zoom)
+        elif self.origin == 'bottom-left':
+            minx, miny = self.PixelsToMeters(tx * self.tile_size, ty * self.tile_size, zoom)
+            maxx, maxy = self.PixelsToMeters((tx + 1) * self.tile_size, (ty + 1) * self.tile_size, zoom)
+        else:
+            return
+        return minx, miny, maxx, maxy
+
+    def ZoomForPixelSize(self, pixelSize):
+        """Zoom level whose resolution (meters/pixel) is nearest to the given pixel size."""
+        for i in range(1, MAX_ZOOM_LEVEL):
+            res = self.Resolution(i)
+            pre_res = self.Resolution(i - 1)
+            if res < pixelSize < pre_res:
+                # return whichever of the two adjacent levels is closer
+                return i if pixelSize <= (res + pre_res) / 2 else i - 1
+        return MAX_ZOOM_LEVEL - 1
+

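A quick worked example of the conversion chain the docstring describes (lat/lon -> meters -> tile -> quadkey); a minimal sketch assuming src/ is on sys.path so the module imports as starearth.utils.mer_geo, with an arbitrary sample point:

    from starearth.utils.mer_geo import GlobalMercator

    gm = GlobalMercator(tile_size=256)

    mx, my = gm.LatLonToMeters(39.9, 116.4)   # WGS84 -> spherical-mercator meters
    tx, ty = gm.MetersToTile(mx, my, 10)      # TMS tile indices at zoom 10
    gx, gy = gm.GoogleTile(tx, ty, 10)        # same tile, Y flipped to the XYZ/Google scheme
    quadkey = gm.QuadTree(tx, ty, 10)         # Bing quadkey, one character per zoom level

    print(tx, ty, gx, gy, quadkey)
    print(gm.TileLatLonBounds(tx, ty, 10))    # tile extent back in WGS84 degrees
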
+ 21 - 0
src/starearth/utils/sql_conn.py

@@ -0,0 +1,21 @@
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.pool import NullPool
+
+Base = declarative_base()
+
+
+def conndb():
+    """Create and return a new SQLAlchemy session (no pooling: one fresh connection per call)."""
+    from config     import Config
+    from connection import Connection
+
+    config = Config()
+    connection = Connection(config)
+
+    # NullPool disables connection pooling, so short-lived worker sessions cannot leak pooled connections
+    engine = create_engine(connection.db_uri, poolclass=NullPool)
+    DBSession = sessionmaker(bind=engine)
+
+    return DBSession()

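Because NullPool opens a fresh connection for every session, callers should close sessions promptly. A minimal usage sketch:

    from sqlalchemy import text

    from starearth.utils.sql_conn import conndb

    session = conndb()
    try:
        session.execute(text('SELECT 1'))  # sanity-check the connection
    finally:
        session.close()  # with NullPool this also discards the underlying connection
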
+ 202 - 0
src/starearth/utils/tileset.py

@@ -0,0 +1,202 @@
+import os
+import json
+from jsonschema import validate, ValidationError
+from app.utils.json_utils import JSONDict
+from app.defines import TileSetType
+
+class TileSet(object):
+    """
+    对 `tileset.json`进行建模,使用jsonschema库提供读写验证
+    """
+    Schema = \
+        {
+            "type": "object",
+            "properties":
+                {
+                    "id": {"type": "string"},
+                    "name": {"type": "string"},
+                    "description": {"type": "string"},
+                    "scheme": {"type": "string"},  # "xyz",
+                    "tilejson": {"type": "string"},  # "2.1.x",
+                    "tile_grid": {"type": "string"},  # WebMercatorQuad、WGS1984Quad、GoogleEarth
+                    "type": {"type": "string"},
+                    "version": {"type": "string"},
+                    "minzoom": {"type": "number"},
+                    "maxzoom": {"type": "number"},
+                    "bounds":
+                        {
+                            "type": "array",
+                            "items": {"type": "number", "maxItems": 4}
+                        },
+                    "center":
+                        {
+                            "type": "array",
+                            "items": {"type": "number", "maxItems": 3}
+                        },
+                    "tiles":
+                        {
+                            "type": "array",
+                            "items": {"type": "string"}
+                        },
+                    "owner": {"type": "string"},
+                    "attribution": {"type": "string"},
+                    "msmt_info":
+                        {
+                            "type": "array",
+                            "items": {"type": "object"}
+                        }
+                },
+            "required": ["id", "scheme", "tilejson", "maxzoom", "minzoom"]
+        }
+
+    def __init__(self):
+        self.id          = None
+        self.name        = None
+        self.description = None
+        self.scheme      = None
+        self.tilejson    = None
+        self.tile_grid   = None
+        self.type        = None
+        self.version     = None
+
+        self.minzoom     = None
+        self.maxzoom     = None
+        self.bounds      = None
+        self.center      = None
+        self.tiles       = None
+
+        self.owner       = None
+        self.attribution = None
+
+        self.msmt_info   = None
+
+        pass
+
+    def load(self, path):
+        """
+        从路径加载tileset.json,并构造Tileset对象
+        :param path:
+        """
+        with open(TileSet._resolve(path), 'r') as fp:
+
+            data = json.load(fp)
+            try:
+                validate(data, TileSet.Schema)
+            except ValidationError as e:
+                print(e, "please check your tileset.json file.")
+
+            self.id        = data['id'      ]
+            self.scheme    = data['scheme'  ]
+            self.tilejson  = data['tilejson']
+            self.tile_grid = data['tile_grid']
+
+            self.name        = data.get("name")
+            self.description = data.get("description")
+
+            self.type    = data.get("type")
+            self.version = data.get("description", "1.0.0")
+
+            self.minzoom = data['minzoom'    ]
+            self.maxzoom = data['maxzoom'    ]
+            self.bounds  = data.get("bounds")
+            self.center  = data.get("center")
+            self.tiles   = data.get("tiles", [])
+
+            self.owner       = data.get("owner", "mapscloud.cn")
+            self.attribution = data.get("attribution", "mapscloud.cn")
+
+            try:
+                _msmt_info = data.get("msmt_info") or []
+                for item in _msmt_info:
+                    TileSet.msmt_item_validate(item)
+                self.msmt_info = _msmt_info
+
+            except ValidationError as e:
+                print(e, "please check the msmt_info items in your tileset.json file.")
+
+            pass
+
+        pass
+
+    def save(self, path):
+        """
+        给定地址,TileSet 对象写入文件 'tileset.json'
+        :param path:
+        """
+        with open(TileSet._resolve(path), 'w') as fp:
+            data = JSONDict()
+
+            data.id = self.id
+            data.scheme = self.scheme or "xyz"
+            data.tilejson  = self.tilejson or "2.1.x"
+            data.tile_grid = self.tile_grid or ""
+            data.name = self.name or ""
+            data.description = self.description or ""
+            data.type = self.type or TileSetType.IMAGE_TILE
+            data.version = self.version or "1.0.0"
+
+            data.minzoom = self.minzoom or 0
+            data.maxzoom = self.maxzoom or 25
+            data.bounds  = self.bounds
+            data.center  = self.center
+            data.tiles   = self.tiles or []
+            data.owner   = self.owner or "mapscloud.cn"
+
+            data.attribution = self.attribution or "mapscloud.cn"
+            data.msmt_info   = self.msmt_info or []
+
+            try:
+                validate(data, TileSet.Schema)
+            except ValidationError as e:
+                print(e, "TileSet validate error.")
+
+            json.dump(data, fp, ensure_ascii=False)
+
+            pass
+
+        pass
+
+    @staticmethod
+    def construct_from(path):
+        """
+        静态方法: 从路径加载tileset.json,并构造Tileset对象
+        :param path:
+        :return:
+        """
+        instance = TileSet()
+        instance.load(path)
+
+        return instance
+
+    @staticmethod
+    def _resolve(path):
+        if os.path.isfile(path):
+            path = os.path.dirname(path)
+
+            pass
+
+        return os.path.join(path, 'tileset.json')
+
+    @staticmethod
+    def msmt_item_validate(json_data):
+        """
+        验证每个时相及其存储
+        :param json_data:
+        """
+        msmt_item_schema = {
+            "type": "object",
+            "patternProperties": {"\d{4}-\d{1,2}-\d{1,2}": {  # 键是变化的,使用patternProperties的正则表达式描述key
+                "type": "object",
+                "properties": {
+                    "storage_type": {"type": "string"},
+                    "path": {"type": "string"},
+                },
+                "required": ["storage_type", "path"]
+            }
+            }
+        }
+
+        validate(json_data, msmt_item_schema)
+
+
+

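A minimal round-trip sketch for the class above (paths and field values are illustrative; JSONDict and TileSetType come from the app package, which must be importable):

    from starearth.utils.tileset import TileSet

    ts = TileSet()
    ts.id      = 'demo-tileset'            # required by the schema
    ts.minzoom = 0
    ts.maxzoom = 12
    ts.bounds  = [73.0, 3.0, 136.0, 54.0]
    ts.center  = [104.0, 35.0, 4]
    ts.save('/data/tiles/demo')            # writes /data/tiles/demo/tileset.json

    ts2 = TileSet.construct_from('/data/tiles/demo')
    print(ts2.id, ts2.scheme, ts2.tilejson)  # 'demo-tileset', 'xyz', '2.1.x'
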
+ 65 - 0
src/starearth/utils/timeit.py

@@ -0,0 +1,65 @@
+import time
+import json
+import os, sys
+import logging
+import traceback
+import functools
+
+
+def log(kind):
+    """Decorator factory: log each call's elapsed time as a JSON line to the file `kind`."""
+    filepath = sys._getframe(1).f_code.co_filename
+
+    def add_log(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            time_cost_logger = logging.getLogger('time_cost')
+            time_cost_logger.setLevel(logging.CRITICAL)  # timing records are logged at CRITICAL
+            time_cost_logger.propagate = False           # keep them out of the root logger
+            # append to the given log file
+            chlr = logging.FileHandler(kind, mode='a')
+            # log line format
+            fmt = logging.Formatter("%(asctime)s - [%(levelname)s] - %(message)s")
+            chlr.setFormatter(fmt)
+            time_cost_logger.addHandler(chlr)
+            t1 = time.time()
+            res = func(*args, **kwargs)
+            t2 = time.time()
+            content = {}
+            try:
+                content['time_cost'] = round(float(t2 - t1), 3)
+                content['method'] = func.__name__
+                content['file'] = filepath.split(os.sep)[-1]
+
+                time_cost_logger.critical(json.dumps(content))
+            except Exception as e:
+                time_cost_logger.warning('%s detail: %s' % (str(e), traceback.format_exc()))
+            finally:
+                # detach the handler so repeated calls do not write duplicate lines
+                time_cost_logger.removeHandler(chlr)
+                chlr.close()
+            return res
+
+        return wrapper
+
+    return add_log
+
+
+def pkg_name_logs(logfile, log_data):
+    """Append one INFO record to `logfile`."""
+    # step 1: get the root logger
+    logger = logging.getLogger()
+    logger.setLevel(logging.INFO)  # master switch for the log level
+    # step 2: create a handler that writes to the log file
+    fh = logging.FileHandler(logfile, mode='a')
+    fh.setLevel(logging.DEBUG)  # level switch for file output
+    # step 3: define the handler's output format
+    formatter = logging.Formatter("%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s")
+    fh.setFormatter(formatter)
+    # step 4: attach the handler to the logger
+    logger.addHandler(fh)
+    # write the record, then detach the handler so handlers do not accumulate across calls
+    logger.info(log_data)
+    logger.removeHandler(fh)
+    fh.close()
+

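A sketch of how the decorator might be applied (the log path is illustrative); each call appends one JSON line with the elapsed time:

    import time

    from starearth.utils.timeit import log

    @log('/tmp/time_cost.log')
    def slice_one_tile():
        time.sleep(0.1)

    slice_one_tile()
    # /tmp/time_cost.log gains a line like:
    # 2024-05-01 12:30:45,678 - [CRITICAL] - {"time_cost": 0.101, "method": "slice_one_tile", "file": "demo.py"}
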
+ 149 - 0
src/starearth/utils/utils.py

@@ -0,0 +1,149 @@
+import rasterio
+import numpy as np
+from osgeo import gdal, osr
+
+
+def create_overviews(input_file, block_size=512):
+    """
+    创建金字塔:
+    1. 如果输入文件已有内置或外置金字塔,返回overview个数;
+    2. 如果输入文件无金字塔,则创建金字塔,再返回overview个数。
+    """
+    ds = gdal.Open(input_file, gdal.GA_ReadOnly)
+    band1 = ds.GetRasterBand(1)
+    ovr_count = band1.GetOverviewCount()
+    if ovr_count != 0:
+        return ovr_count
+    width = ds.RasterXSize
+    height = ds.RasterYSize
+
+    n_level = 0
+    overview = 1
+
+    while min(width // overview, height // overview) > block_size:
+        overview *= 2
+        n_level += 1
+
+    overview_list = [2 ** j for j in range(1, n_level + 1)]
+    ds.BuildOverviews(overviewlist=overview_list)
+    return len(overview_list)
+
+
+def getRealFourCorners(datafile):
+    """Get the four outermost non-nodata (non-zero) corner coordinates of the raster."""
+    overviews = create_overviews(datafile)
+
+    def _scan(src):
+        """Scan band 1 inwards from each edge for the first non-zero pixel and map it to coordinates."""
+        images = src.read(1)
+        width = src.width
+        height = src.height
+        aff = src.transform
+        row_top = row_bottom = column_left = column_right = None
+
+        for i in range(0, height, 1):          # top edge
+            value_arr = np.nonzero(images[i])
+            if value_arr[0].size > 0:
+                row_top = aff * (value_arr[0][0], i)
+                break
+
+        for i in range(height - 1, 0, -1):     # bottom edge
+            value_arr = np.nonzero(images[i])
+            if value_arr[0].size > 0:
+                row_bottom = aff * (value_arr[0][value_arr[0].size - 1], i)
+                break
+
+        for i in range(width):                 # left edge
+            value_arr = np.nonzero(images[:, i])
+            if value_arr[0].size > 0:
+                column_left = aff * (i, value_arr[0][value_arr[0].size - 1])
+                break
+
+        for i in range(width - 1, 0, -1):      # right edge
+            value_arr = np.nonzero(images[:, i])
+            if value_arr[0].size > 0:
+                column_right = aff * (i, value_arr[0][0])
+                break
+
+        return row_top, row_bottom, column_left, column_right
+
+    with rasterio.open(datafile, overview_level=overviews - 1, mode='r') as src:
+        # scan the coarsest overview when its first row carries data
+        # (the overview band is small, so reading it twice is cheap)
+        if np.nonzero(src.read(1)[0])[0].size > 0:
+            return _scan(src)
+
+    # otherwise fall back to the finest overview (overview_level=0)
+    with rasterio.open(datafile, overview_level=0, mode='r') as src:
+        return _scan(src)
+
+
+def transform_point(point, from_srs_id, to_srs_id, reverse=False):
+    """经纬度、墨卡托相互转换"""
+    mer = osr.SpatialReference()
+    mer.ImportFromEPSG(from_srs_id)
+
+    geo = osr.SpatialReference()
+    geo.ImportFromEPSG(to_srs_id)
+
+    if not reverse:
+        t = osr.CoordinateTransformation(mer, geo)
+    else:
+        t = osr.CoordinateTransformation(geo, mer)
+
+    return t.TransformPoint(*point)
+
+
+def get_dem_zoom(pixelSize, dem_tile_size, MAX_ZOOM_LEVEL=32):
+    """Zoom level of a geodetic (WGS84) tile grid whose resolution is nearest to pixelSize (degrees/pixel)."""
+    RES_FACT = 360.0 / dem_tile_size
+
+    for i in range(1, MAX_ZOOM_LEVEL):
+        res = RES_FACT / 2 ** i
+        pre_res = RES_FACT / 2 ** (i - 1)
+        if res < pixelSize < pre_res:
+            # return whichever of the two adjacent levels is closer
+            return i if pixelSize <= (res + pre_res) / 2 else i - 1
+    return MAX_ZOOM_LEVEL - 1

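A sketch tying these helpers together (the input path is illustrative):

    from starearth.utils.utils import getRealFourCorners, transform_point, get_dem_zoom

    row_top, row_bottom, column_left, column_right = getRealFourCorners('/data/scene.tif')

    # reproject the top corner from Web Mercator meters to WGS84 degrees
    # (with GDAL 3 the axis order follows the EPSG definitions)
    corner_4326 = transform_point(row_top, 3857, 4326)

    # terrain zoom for ~30 m pixels (converted to degrees) against 64-px DEM tiles
    zoom = get_dem_zoom(30 / 111319.49, 64)
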
+ 59 - 0
src/starearth/xml_to_geojsonl.py

@@ -0,0 +1,59 @@
+import os
+from osgeo import gdal
+from app.utils.create_geojsonl import verify_geotiff, create_default_geojsonl
+
+
+def write_geojsonl(geojsonl_data: dict, root_path: str):
+
+    for xml_relative_path, properties in geojsonl_data.items():
+
+        xml_path = os.path.join(root_path, xml_relative_path)
+        abs_path = os.path.dirname(xml_path)
+        filename = os.path.basename(xml_path)
+
+        # strip the metadata suffix ('.xml', '.tif.xml' or '.tiff.xml') to get the image base name
+        # (note: str.rstrip() removes characters, not suffixes, so it cannot be used here)
+        name = filename.lower()
+        for suffix in ('.tiff.xml', '.tif.xml', '.xml'):
+            if name.endswith(suffix):
+                name = name[:-len(suffix)]
+                break
+
+        for _file in os.listdir(abs_path):
+
+            tif_path = os.path.join(abs_path, _file)
+
+            if not os.path.isfile(tif_path):
+                continue
+
+            _name, _ext = os.path.splitext(_file)
+
+            if _ext.lower() not in ['.tif', '.tiff', ] or _name.lower() != name:
+                continue
+
+            break
+
+        else:
+            # no GeoTIFF with a matching base name next to this XML; skip the entry
+            continue
+
+        epsg = verify_geotiff(tif_path, None)  # validates the GeoTIFF and reports its EPSG code
+
+        new_geojsonl = os.path.splitext(tif_path)[0] + '.geojsonl'
+
+        # reserved/backup fields
+        properties["bk1"] = ''
+        properties["bk2"] = ''
+        properties["bk3"] = ''
+
+        # add display-priority (disp) and resolution (res) fields
+        ds = gdal.Open(tif_path)
+        res = float(ds.GetGeoTransform()[1])
+        del ds
+
+        properties['disp'] = 1
+        properties['res'] = res
+
+        # normalize the date fields
+        if len(properties["date"]) < 10:
+            properties["date"] = properties["datetime"]
+
+        if '-' in properties["date"]:
+            properties["date"] = properties["date"].split(' ')[0]
+
+        if ':' not in properties["datetime"]:
+            properties["datetime"] = properties["datetime"] + " 00:00:00"
+
+        create_default_geojsonl(tif_path, new_geojsonl, properties=properties)

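For reference, a sketch of the expected call shape (paths and property values are illustrative; the real properties come from the parsed XML metadata):

    from starearth.xml_to_geojsonl import write_geojsonl

    write_geojsonl(
        {
            r'GF2\scene_001.xml': {
                'date':     '2023-06-01',
                'datetime': '2023-06-01 10:30:00',
            },
        },
        root_path='/data/import',
    )
    # for /data/import/GF2/scene_001.tif this writes scene_001.geojsonl
    # next to the image via create_default_geojsonl()
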
+ 0 - 0
src/utils/__init__.py


Some files were not shown because too many files were changed