diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..4e14cfd
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,10 @@
+FROM reg.ivazh.ru/infra-oodb
+WORKDIR /app
+COPY query_itv ./query_itv
+COPY requirements.txt ./
+RUN pip3 install -r requirements.txt && \
+    mkdir -p /opt/tnt/bin && \
+    ln -s /usr/bin/python3 /opt/tnt/bin/python3
+ENV LD_LIBRARY_PATH "/app"
+ENV PYTHONPATH "${PYTHONPATH}:/app"
+CMD ["python3", "-m", "query_itv"]
\ No newline at end of file
diff --git a/query_itv/__main__.py b/query_itv/__main__.py
index bd391fd..f49825a 100644
--- a/query_itv/__main__.py
+++ b/query_itv/__main__.py
@@ -1,4 +1,5 @@
-from tempfile import TemporaryDirectory
+from json import JSONDecodeError
+from tempfile import TemporaryDirectory, gettempdir
 from typing import Optional, Any

 import pika
@@ -40,7 +41,8 @@
 tasks = Queue()
 connected = set()
 connection: Optional[pika.BlockingConnection] = None
-channel: Optional[Any] = None
+channel_send: Optional[Any] = None
+channel_receive: Optional[Any] = None
 server: Optional[SimpleXMLRPCServer] = None

 logger = logging.getLogger('xmlrpcserver')
@@ -66,14 +68,46 @@
         client.put_object(Body=f.read(), Bucket=bucket, Key=key)


-def get_branch(bndname: str, scheme: str):
-    conn = db.connect_db()
-    with Session(conn) as session:
-        item = session.query(db.IncomeBranch).filter_by(scheme=scheme).join(
-            db.User).filter_by(bndname=bndname).one_or_none()
-        if item:
-            return item.branch, item.local_scheme
-    return None, None
+def pika_callback(ch, method, properties, body):
+    try:
+        data = json.loads(body)
+        params = data['params']
+        url = data['url']
+        files = []
+        for file in data['files']:
+            fn = os.path.join(gettempdir(), uuid4().hex)
+            download_file(file['url']['name'], file['url']['bucket'], fn)
+            file['url'] = fn
+            files.append(file)
+        run_task(params['query_type'], params, files, url)
+    except JSONDecodeError as e:
+        logging.warning(e)
+    finally:
+        ch.basic_ack(delivery_tag=method.delivery_tag)
+
+
+def send_response(params, files, url):
+    global channel_send
+    files_s3 = []
+    for file in files:
+        fn = uuid4().hex
+        upload_file(file['url'], fn, Config.s3_bucket)
+        file['url'] = {'name': fn, 'bucket': Config.s3_bucket}
+        files_s3.append(file)
+    data = {
+        'params': params,
+        'files': files_s3,
+        'url': url,
+    }
+    channel_send.basic_publish(exchange=Config.rabbit_out_exchange, body=json.dumps(data), routing_key='')
+
+
+def pika_task():
+    global connection
+    global channel_receive
+    channel_receive = connection.channel()
+    channel_receive.basic_consume(queue=Config.rabbit_incoming_queue, on_message_callback=pika_callback)
+    channel_receive.start_consuming()


 def run_tasks():
@@ -160,8 +194,7 @@
     filename = fn + '.zip'
     filepath = os.path.join(os.getcwd(), 'tmp', filename)
     response_files = [{'name': filename, 'url': filepath, 'size': os.path.getsize(filepath)}]
-    proxy = ServerProxy(url)
-    proxy.send(response_params, response_files, Config.ret_path)
+    send_response(response_params, response_files, url)


 def receive_data(params, files, url):
@@ -200,29 +233,23 @@
 def run_task(query_type, params, files, url):
     if query_type == NEW_DATA_REQUEST:
-        tasks.put(lambda: get_data(params, files, url))
+        get_data(params, files, url)

     if query_type == NEW_DATA_RESPONSE:
-        tasks.put(lambda: receive_data(params, files, url))
+        receive_data(params, files, url)


 def main():
     global connection
     global server
-    global channel
+    global channel_send
+
+    connection = pika.BlockingConnection(pika.URLParameters(Config.rabbit_conn))
+    channel_send = connection.channel()

     logger.setLevel(logging.INFO)
     logger.warning('Use Control-C to exit')

-    thread = threading.Thread(target=run_tasks)
-    thread.start()
-
-    try:
-        logger.warning('Start server')
-        uvicorn.run(app, host="0.0.0.0", port=8000)
-    except KeyboardInterrupt:
-        logger.warning('Exiting')
-    finally:
-        server.server_close()
+    pika_task()


 def vers_key(e):
diff --git a/query_itv/config.py b/query_itv/config.py
index 42ecb72..2c1a479 100644
--- a/query_itv/config.py
+++ b/query_itv/config.py
@@ -20,19 +20,13 @@
 class Config:
     rabbit_conn: str = 'amqp://user:password@10.10.8.83:31005/%2f'
     rabbit_queue: str = 'ipd'
-
-    ws_rabbit_params: dict = {
-        'host': '10.10.8.83',
-        'port': 31005,
-        'exchange': 'ipd',
-        'user': 'user',
-        'password': 'password',
-    }
+    rabbit_incoming_queue: str = 'ipd_queue_queries'
+    rabbit_out_exchange: str = 'ipd_out_itv'

     s3_endpoint: str = 'http://10.10.8.83:31006'
     s3_key_id: str = 's57'
     s3_access_key: str = 'd9MMinLF3U8TLSj'
-    s3_bucket: str = 'files'
+    s3_bucket: str = 'itv'

     gql_url: str = 'https://gql.ivazh.ru/graphql'
     gql_download: str = 'https://gql.ivazh.ru/item/{key}'
diff --git a/query_itv/config/response2.json b/query_itv/config/response2.json
new file mode 100644
index 0000000..d1121cb
--- /dev/null
+++ b/query_itv/config/response2.json
@@ -0,0 +1,12 @@
+{
+  "filestorage": {
+    "type": "s3",
+    "endpoint": "http://10.10.8.83:31006",
+    "key_id": "s57",
+    "access_key": "d9MMinLF3U8TLSj",
+    "download_path": "/tmp"
+  },
+  "file_code": "c1000",
+  "name_code": "c122",
+  "use_version": true
+}
\ No newline at end of file
diff --git a/query_itv/config/workspaces.json b/query_itv/config/workspaces.json
new file mode 100644
index 0000000..cdd79f5
--- /dev/null
+++ b/query_itv/config/workspaces.json
@@ -0,0 +1,167 @@
+{
+  "databases": {
+    "oodb_git": {
+      "host": "10.10.8.83",
+      "port": 32100,
+      "database": "db",
+      "user": "postgres",
+      "password": "Root12345678"
+    }
+  },
+  "workspaces": {
+    "documents_src": {
+      "type": "documents",
+      "group": "src",
+      "database": "oodb_git",
+      "schema": "documents_src",
+      "alias": "Документы исходная"
+    },
+    "documents_standard": {
+      "type": "documents",
+      "group": "order",
+      "database": "oodb_git",
+      "schema": "documents_standard",
+      "alias": "Документы эталон"
+    },
+    "documents_standard_pub": {
+      "type": "documents",
+      "group": "order",
+      "database": "oodb_git",
+      "schema": "documents_standard",
+      "alias": "Документы публичная"
+    },
+    "ood": {
+      "type": "npd",
+      "group": "src",
+      "database": "oodb_git",
+      "schema": "ood",
+      "alias": "ООБД исходные НПД",
+      "map_service": "VUE_APP_GISAIS_URL:/styles/ood/style.json"
+    },
+    "oodb": {
+      "type": "oodb",
+      "group": "order",
+      "database": "oodb_git",
+      "schema": "kartap",
+      "alias": "ООДБ эталон",
+      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
+    },
+    "oodb_standard": {
+      "type": "oodb",
+      "group": "forming_standard",
+      "database": "oodb_git",
+      "schema": "kartap",
+      "alias": "ООДБ эталон",
+      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
+    },
+    "oodb_working": {
+      "type": "oodb",
+      "group": "forming_work",
+      "database": "oodb_git",
+      "schema": "kartap",
+      "alias": "ООДБ рабочая",
+      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb_tech/style.json"
+    },
+    "oodb_pub": {
+      "type": "oodb",
+      "group": "order",
+      "database": "oodb_git",
+      "schema": "kartap",
+      "alias": "ООБД публичная",
+      "map_service": "VUE_APP_GISAIS_URL:/styles/oodb/style.json"
+    },
+    "regions": {
+      "type": "regions",