Change config model

This commit is contained in:
ashatora
2024-06-10 08:09:10 +03:00
parent 6600d9797f
commit 056b91c77f
6 changed files with 54 additions and 216 deletions

View File

@@ -20,10 +20,9 @@ from .reqs_graphql import get_catalog, get_object
import xml.etree.ElementTree as ET
from .reqs.request_xml_service import RequestXmlService
import zipfile
from .config import Config
from .config import config
from .zip import Zip
import boto3
import request_itv.db as db
from sqlalchemy.orm import Session
from fastapi import FastAPI, Response, Form, UploadFile, File, Request
from fastapi.middleware.cors import CORSMiddleware
@@ -45,9 +44,9 @@ logger = logging.getLogger('xmlrpcserver')
def s3_connection():
    # Build a boto3 S3 client from application settings.
    # NOTE(review): this is a rendered diff hunk — the removed and the added
    # versions of the return statement both appear below without +/- markers.
    # Removed version (reads the old flat `Config` class attributes):
    return boto3.client('s3', endpoint_url=Config.s3_endpoint,
                        aws_access_key_id=Config.s3_key_id,
                        aws_secret_access_key=Config.s3_access_key)
    # Added version (reads the new nested bestconfig settings) — unreachable
    # as rendered here; in the post-change file it is the only return:
    return boto3.client('s3', endpoint_url=config.default.REQUEST_ITV.s3.endpoint,
                        aws_access_key_id=config.default.REQUEST_ITV.s3.key_id,
                        aws_secret_access_key=config.default.REQUEST_ITV.s3.access_key)
def download_file(key: str, bucket: str, filename: str):
@@ -92,22 +91,22 @@ def send_response(params, files, url):
files_s3 = []
for file in files:
fn = uuid4().hex
upload_file(file['url'], fn, Config.s3_bucket_itv)
file['url'] = {'name': fn, 'bucket': Config.s3_bucket_itv}
upload_file(file['url'], fn, config.default.REQUEST_ITV.s3.bucket_itv)
file['url'] = {'name': fn, 'bucket': config.default.REQUEST_ITV.s3.bucket_itv}
files_s3.append(file)
data = {
'params': params,
'files': files_s3,
'url': url,
}
channel_send.basic_publish(exchange=Config.rabbit_out_exchange, body=json.dumps(data), routing_key='')
channel_send.basic_publish(exchange=config.default.REQUEST_ITV.amqp.out_exchange, body=json.dumps(data), routing_key='')
def pika_task():
    # Open a fresh channel on the shared connection and block consuming
    # messages from the incoming queue, dispatching each to pika_callback.
    global connection
    global channel_receive
    channel_receive = connection.channel()
    # NOTE(review): rendered diff — the next two lines are the removed (old
    # `Config` attribute) and added (nested bestconfig) versions of the same
    # basic_consume call; only the second exists in the post-change file.
    channel_receive.basic_consume(queue=Config.rabbit_incoming_queue, on_message_callback=pika_callback)
    channel_receive.basic_consume(queue=config.default.REQUEST_ITV.amqp.incoming_queue, on_message_callback=pika_callback)
    channel_receive.start_consuming()
@@ -170,7 +169,7 @@ def get_objects(params, files, url):
for file in obj['properties'].get('c1000', []):
if not main_filename:
main_filename = file['fileName']
res = requests.get(Config.gql_download, params={'item_id': file["key"]})
res = requests.get(config.default.REQUEST_ITV.gql.download, params={'item_id': file["key"]})
zipf.writestr(f'{main_filename}/{file["fileName"]}', res.content)
zipf.close()
response_files = [{'name': filename, 'url': filepath, 'size': os.path.getsize(filepath)}]
@@ -232,9 +231,11 @@ def put_object(params, files, url):
req = ET.fromstring(params['query_data'])
obj = req.find('chart')
class_id = obj.get('Class')
con = OOConnectionParams(Config.oodb_schema, Config.oodb_host, Config.oodb_port, Config.oodb_dbname,
Config.oodb_username, Config.oodb_passwd, Config.oodb_schema)
ws = OODBWorkspace.ws(Config.oodb_schema)
con = OOConnectionParams(config.default.REQUEST_ITV.oodb.schema, config.default.REQUEST_ITV.oodb.host,
config.default.REQUEST_ITV.oodb.port, config.default.REQUEST_ITV.oodb.dbname,
config.default.REQUEST_ITV.oodb.username, config.default.REQUEST_ITV.oodb.passwd,
config.default.REQUEST_ITV.oodb.schema)
ws = OODBWorkspace.ws(config.default.REQUEST_ITV.oodb.schema)
if not ws.isInit():
res = ws.init(con)
logger.warning(res)
@@ -262,9 +263,9 @@ def put_object(params, files, url):
key = uuid4().hex
fileVal.fileName = variantToString(item.relative_to(dir.name))
fileVal.key = variantToString(key)
fileVal.bucket = variantToString(Config.s3_bucket)
fileVal.bucket = variantToString(config.default.REQUEST_ITV.s3.bucket)
res &= feature.addAttribute('c1000', variantFromFileValue(fileVal))
upload_file(str(item), key, Config.s3_bucket)
upload_file(str(item), key, config.default.REQUEST_ITV.s3.bucket)
ws.transaction()
res = ws.save()
@@ -288,7 +289,7 @@ def main():
global server
global channel_send
connection = pika.BlockingConnection(pika.URLParameters(Config.rabbit_conn))
connection = pika.BlockingConnection(pika.URLParameters(config.default.REQUEST_ITV.amqp.conn))
channel_send = connection.channel()
logger.setLevel(logging.INFO)

View File

@@ -1,32 +1,3 @@
from bestconfig import Config
class Config:
    """Legacy flat application settings (this diff replaces it with a
    bestconfig-loaded YAML file).

    NOTE(review): endpoints and credentials (database passwords, S3 access
    key, AMQP URL) are hard-coded in source — they should come from the
    environment or an external secrets store.
    """
    ret_path: str = 'http://10.10.8.83:32200/'
    self_bnd: str = 'bnd127'
    # PostgreSQL connection (used by db.connect_db)
    pg_host: str = '10.10.8.83'
    pg_port: int = 32101
    pg_dbname: str = 'db'
    pg_username: str = 'postgres'
    pg_password: str = 'Root12345678'
    # OODB workspace connection
    oodb_host: str = '10.10.8.83'
    oodb_port: int = 32100
    oodb_dbname: str = 'db'
    oodb_username: str = 'postgres'
    oodb_passwd: str = 'Root12345678'
    oodb_schema: str = 'documents_src'
    # RabbitMQ / AMQP messaging
    rabbit_conn: str = 'amqp://user:password@10.10.8.83:31005/%2f'
    rabbit_queue: str = 'ipd'
    rabbit_incoming_queue: str = 'ipd_queue_requests'
    rabbit_out_exchange: str = 'ipd_out_itv'
    # S3 object storage
    s3_endpoint: str = 'http://10.10.8.83:31006'
    s3_key_id: str = 's57'
    s3_access_key: str = 'd9MMinLF3U8TLSj'
    s3_bucket: str = 'files'
    s3_bucket_itv: str = 'itv'
    # GraphQL service
    gql_url: str = 'https://gql.ivazh.ru/graphql'
    gql_download: str = 'https://gql.ivazh.ru/item/{key}'
    gql_schema: str = 'pdim'
config = Config()

31
request_itv/config.yaml Normal file
View File

@@ -0,0 +1,31 @@
version: 0.0.1
default:
REQUEST_ITV:
ret_path: 'http://10.10.8.83:32200/'
self_bnd: 'bnd127'
oodb:
host: '10.10.8.83'
port: 32100
dbname: 'db'
username: 'postgres'
passwd: 'Root12345678'
schema: 'documents_src'
amqp:
conn: 'amqp://user:password@10.10.8.83:31005/%2f'
queue: 'ipd'
incoming_queue: 'ipd_queue_requests'
out_exchange: 'ipd_out_itv'
s3:
endpoint: 'http://10.10.8.83:31006'
key_id: 's57'
access_key: 'd9MMinLF3U8TLSj'
bucket: 'files'
bucket_itv: 'itv'
gql:
url: 'https://gql.ivazh.ru/graphql'
download: 'https://gql.ivazh.ru/item/{key}'
schema: 'pdim'

View File

@@ -1,166 +0,0 @@
from datetime import datetime
from typing import List, Optional
from sqlalchemy import create_engine, String, select, ForeignKey, Enum
from sqlalchemy.orm import Session, DeclarativeBase, Mapped, mapped_column, relationship
from .config import Config
def tow(day: int, hour: int, minute: int):
    """Return the time-of-week index in minutes for a day/hour/minute triple.

    Day 0, hour 0, minute 0 maps to 0; each subsequent day adds 1440 minutes.
    """
    minutes_per_hour = 60
    minutes_per_day = 24 * minutes_per_hour
    return day * minutes_per_day + hour * minutes_per_hour + minute
class Base(DeclarativeBase):
    # Shared declarative base class for every ORM model in this module.
    pass
class User(Base):
    """Account row with its related profiles, branches, schedule and queue."""

    __tablename__ = 'users'

    id: Mapped[int] = mapped_column(primary_key=True)
    username: Mapped[str]
    passwd: Mapped[str]
    bndname: Mapped[str]
    newbnd: Mapped[bool]
    active: Mapped[bool]
    upstream: Mapped[bool]

    profiles: Mapped[List['Profile']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )
    income_branches: Mapped[List['IncomeBranch']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )
    schedule: Mapped[List['Schedule']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )
    queue: Mapped[List['Queue']] = relationship(
        back_populates='user', cascade='all, delete-orphan'
    )

    def __repr__(self) -> str:
        return f'User(id={self.id!r}, username={self.username!r}, password={self.passwd!r}, newbnd={self.newbnd})'

    def to_dict(self) -> dict:
        """Serialize the user plus all child collections to plain dicts."""
        data = {
            'id': self.id,
            'username': self.username,
            'bndname': self.bndname,
            'newbnd': self.newbnd,
            'active': self.active,
            'upstream': self.upstream,
        }
        # Child rows are flattened through their own to_dict implementations.
        for rel in ('profiles', 'schedule', 'queue', 'income_branches'):
            data[rel] = [child.to_dict() for child in getattr(self, rel)]
        return data

    def is_active_now(self):
        """Return True if the user has no schedule rows, or the current
        time-of-week falls inside at least one scheduled window."""
        if not self.schedule:
            return True
        now = datetime.now()
        current = tow(now.weekday(), now.hour, now.minute)
        return any(
            tow(w.day_start, w.hour_start, w.minute_start)
            <= current
            <= tow(w.day_end, w.hour_end, w.minute_end)
            for w in self.schedule
        )
class Profile(Base):
    """Per-user profile row; `branch` and `json` columns are nullable."""

    __tablename__ = 'profiles'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    scheme: Mapped[str]
    branch: Mapped[str] = mapped_column(String, nullable=True)
    json: Mapped[str] = mapped_column(String, nullable=True)
    no_files: Mapped[bool]

    user: Mapped['User'] = relationship(back_populates='profiles')

    def to_dict(self) -> dict:
        """Serialize the profile's public fields (user_id/no_files excluded)."""
        return {field: getattr(self, field)
                for field in ('id', 'scheme', 'branch', 'json')}
class IncomeBranch(Base):
    """Per-user income branch row.

    NOTE(review): presumably maps a remote scheme/branch to a local scheme —
    confirm against the callers.
    """

    __tablename__ = 'income_branches'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    scheme: Mapped[str]
    branch: Mapped[str]
    local_scheme: Mapped[str]

    user: Mapped['User'] = relationship(back_populates='income_branches')

    def to_dict(self) -> dict:
        """Serialize the branch mapping's public fields."""
        return {field: getattr(self, field)
                for field in ('id', 'scheme', 'branch', 'local_scheme')}
class Schedule(Base):
    """Weekly activity window for a user: start/end given as weekday,
    hour and minute (consumed by User.is_active_now via tow())."""

    __tablename__ = 'schedule'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    day_start: Mapped[int]
    hour_start: Mapped[int]
    minute_start: Mapped[int]
    day_end: Mapped[int]
    hour_end: Mapped[int]
    minute_end: Mapped[int]

    user: Mapped['User'] = relationship(back_populates='schedule')

    def to_dict(self) -> dict:
        """Serialize the window's public fields."""
        return {field: getattr(self, field)
                for field in ('id', 'day_start', 'hour_start', 'minute_start',
                              'day_end', 'hour_end', 'minute_end')}
class Queue(Base):
    """Queued item for a user, identified by a commit id and a schema name."""

    __tablename__ = 'queue'

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
    commit_id: Mapped[str]
    schema: Mapped[str]

    user: Mapped['User'] = relationship(back_populates='queue')

    def to_dict(self) -> dict:
        """Serialize the queue entry's public fields."""
        return {field: getattr(self, field)
                for field in ('id', 'commit_id', 'schema')}
class Schemas(Base):
    # Standalone table of schema names and their types (no relationships,
    # no to_dict). NOTE(review): presumably a registry consulted elsewhere —
    # confirm with callers.
    __tablename__ = 'schemas'
    id: Mapped[int] = mapped_column(primary_key=True)
    schema: Mapped[str]
    schema_type: Mapped[str]
def connect_db():
    """Create a SQLAlchemy engine for the PostgreSQL database described by
    the module's Config settings (psycopg driver)."""
    dsn = (
        f"postgresql+psycopg://{Config.pg_username}:{Config.pg_password}"
        f"@{Config.pg_host}:{Config.pg_port}/{Config.pg_dbname}"
    )
    return create_engine(dsn)

View File

@@ -1,9 +1,9 @@
from gql import gql, Client
from gql.transport.aiohttp import AIOHTTPTransport
from .config import Config
from .config import config
transport = AIOHTTPTransport(url=Config.gql_url)
service = Config.gql_schema
transport = AIOHTTPTransport(url=config.default.REQUEST_ITV.gql.url)
service = config.default.REQUEST_ITV.gql.schema
def get_classifier():