diff --git a/.gitignore b/.gitignore index feb7369..5a1df5d 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,4 @@ __temp .DS_Store docklet.conf home.html -src/migrations/ +src/utils/migrations/ diff --git a/src/com/migrations/README b/src/com/migrations/README deleted file mode 100644 index 98e4f9c..0000000 --- a/src/com/migrations/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. \ No newline at end of file diff --git a/src/com/migrations/alembic.ini b/src/com/migrations/alembic.ini deleted file mode 100644 index f8ed480..0000000 --- a/src/com/migrations/alembic.ini +++ /dev/null @@ -1,45 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/src/com/migrations/env.py b/src/com/migrations/env.py deleted file mode 100644 index 4593816..0000000 --- a/src/com/migrations/env.py +++ /dev/null @@ -1,87 +0,0 @@ -from __future__ import with_statement -from alembic import context -from sqlalchemy import engine_from_config, pool -from logging.config import fileConfig -import logging - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. 
-# This line sets up loggers basically. -fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -from flask import current_app -config.set_main_option('sqlalchemy.url', - current_app.config.get('SQLALCHEMY_DATABASE_URI')) -target_metadata = current_app.extensions['migrate'].db.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure(url=url) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.readthedocs.org/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if script.upgrade_ops.is_empty(): - directives[:] = [] - logger.info('No changes in schema detected.') - - engine = engine_from_config(config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - connection = engine.connect() - context.configure(connection=connection, - target_metadata=target_metadata, - process_revision_directives=process_revision_directives, - **current_app.extensions['migrate'].configure_args) - - try: - with context.begin_transaction(): - context.run_migrations() - finally: - connection.close() - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/src/com/migrations/script.py.mako b/src/com/migrations/script.py.mako deleted file mode 100644 index 9570201..0000000 --- a/src/com/migrations/script.py.mako +++ /dev/null @@ -1,22 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/src/com/migrations/versions/37dcfdb2604_.py b/src/com/migrations/versions/37dcfdb2604_.py deleted file mode 100644 index c746d1c..0000000 --- a/src/com/migrations/versions/37dcfdb2604_.py +++ /dev/null @@ -1,116 +0,0 @@ -"""empty message - -Revision ID: 37dcfdb2604 -Revises: None -Create Date: 2018-04-22 11:58:48.307690 - -""" - -# revision identifiers, used by Alembic. -revision = '37dcfdb2604' -down_revision = None - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('apply_msg', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('username', sa.String(length=10), nullable=True), - sa.Column('number', sa.Integer(), nullable=True), - sa.Column('reason', sa.String(length=600), nullable=True), - sa.Column('status', sa.String(length=10), nullable=True), - sa.Column('time', sa.DateTime(timezone=10), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('image', - sa.Column('imagename', sa.String(length=50), nullable=True), - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('hasPrivate', sa.Boolean(), nullable=True), - sa.Column('hasPublic', sa.Boolean(), nullable=True), - sa.Column('ownername', sa.String(length=20), nullable=True), - sa.Column('create_time', sa.DateTime(), nullable=True), - sa.Column('description', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('v_cluster', - sa.Column('clusterid', sa.BigInteger(), autoincrement=False, nullable=False), - sa.Column('clustername', sa.String(length=50), nullable=True), - sa.Column('ownername', sa.String(length=20), nullable=True), - sa.Column('status', sa.String(length=10), 
nullable=True), - sa.Column('size', sa.Integer(), nullable=True), - sa.Column('nextcid', sa.Integer(), nullable=True), - sa.Column('create_time', sa.DateTime(), nullable=True), - sa.Column('start_time', sa.String(length=20), nullable=True), - sa.Column('proxy_server_ip', sa.String(length=20), nullable=True), - sa.Column('proxy_public_ip', sa.String(length=20), nullable=True), - sa.PrimaryKeyConstraint('clusterid') - ) - op.create_table('v_node', - sa.Column('name', sa.String(length=100), nullable=False), - sa.Column('laststopcpuval', sa.Float(), nullable=True), - sa.Column('laststopruntime', sa.Integer(), nullable=True), - sa.Column('billing', sa.Integer(), nullable=True), - sa.PrimaryKeyConstraint('name') - ) - op.create_table('billing_history', - sa.Column('node_name', sa.String(length=100), nullable=False), - sa.Column('vclusterid', sa.Integer(), nullable=True), - sa.Column('cpu', sa.Float(), nullable=True), - sa.Column('mem', sa.Float(), nullable=True), - sa.Column('disk', sa.Float(), nullable=True), - sa.Column('port', sa.Float(), nullable=True), - sa.ForeignKeyConstraint(['vclusterid'], ['v_cluster.clusterid'], ), - sa.PrimaryKeyConstraint('node_name') - ) - op.create_table('container', - sa.Column('containername', sa.String(length=100), nullable=False), - sa.Column('hostname', sa.String(length=30), nullable=True), - sa.Column('ip', sa.String(length=20), nullable=True), - sa.Column('host', sa.String(length=20), nullable=True), - sa.Column('image', sa.String(length=50), nullable=True), - sa.Column('lastsave', sa.DateTime(), nullable=True), - sa.Column('setting_cpu', sa.Integer(), nullable=True), - sa.Column('setting_mem', sa.Integer(), nullable=True), - sa.Column('setting_disk', sa.Integer(), nullable=True), - sa.Column('vclusterid', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['vclusterid'], ['v_cluster.clusterid'], ), - sa.PrimaryKeyConstraint('containername') - ) - op.create_table('history', - sa.Column('id', sa.Integer(), nullable=False), - 
sa.Column('vnode', sa.String(length=100), nullable=True), - sa.Column('action', sa.String(length=30), nullable=True), - sa.Column('runningtime', sa.Integer(), nullable=True), - sa.Column('cputime', sa.Float(), nullable=True), - sa.Column('billing', sa.Integer(), nullable=True), - sa.Column('actionTime', sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(['vnode'], ['v_node.name'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('port_mapping', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('node_name', sa.String(length=100), nullable=True), - sa.Column('node_ip', sa.String(length=20), nullable=True), - sa.Column('node_port', sa.Integer(), nullable=True), - sa.Column('host_port', sa.Integer(), nullable=True), - sa.Column('vclusterid', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['vclusterid'], ['v_cluster.clusterid'], ), - sa.PrimaryKeyConstraint('id') - ) - ### end Alembic commands ### - - -def downgrade(): - ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('port_mapping') - op.drop_table('history') - op.drop_table('container') - op.drop_table('billing_history') - op.drop_table('v_node') - op.drop_table('v_cluster') - op.drop_table('image') - op.drop_table('apply_msg') - ### end Alembic commands ### diff --git a/src/com/env.py b/src/utils/env.py similarity index 100% rename from src/com/env.py rename to src/utils/env.py diff --git a/src/com/etcdlib.py b/src/utils/etcdlib.py similarity index 100% rename from src/com/etcdlib.py rename to src/utils/etcdlib.py diff --git a/src/com/imagemgr.py b/src/utils/imagemgr.py similarity index 99% rename from src/com/imagemgr.py rename to src/utils/imagemgr.py index 86f66fb..fc6a2bb 100755 --- a/src/com/imagemgr.py +++ b/src/utils/imagemgr.py @@ -20,10 +20,10 @@ from configparser import ConfigParser from io import StringIO import os,sys,subprocess,time,re,datetime,threading,random import xmlrpc.client -from com.model import db, Image +from utils.model import db, Image -from com.log import logger -from com import env, updatebase +from utils.log import logger +from utils import env, updatebase from worker.lvmtool import * import requests diff --git a/src/com/log.py b/src/utils/log.py similarity index 99% rename from src/com/log.py rename to src/utils/log.py index 46568e1..721776e 100755 --- a/src/com/log.py +++ b/src/utils/log.py @@ -6,7 +6,7 @@ import argparse import sys import time # this is only being used as part of the example import os -from com import env +from utils import env # logger should only be imported after initlogging has been called logger = None diff --git a/src/com/logs.py b/src/utils/logs.py similarity index 100% rename from src/com/logs.py rename to src/utils/logs.py diff --git a/src/com/manage.py b/src/utils/manage.py similarity index 100% rename from src/com/manage.py rename to src/utils/manage.py diff --git a/src/com/model.py b/src/utils/model.py similarity index 99% rename from src/com/model.py rename to src/utils/model.py index 
fcc1ac9..250ad10 100755 --- a/src/com/model.py +++ b/src/utils/model.py @@ -34,7 +34,7 @@ import os, json from itsdangerous import TimedJSONWebSignatureSerializer as Serializer from itsdangerous import SignatureExpired, BadSignature -from com import env +from utils import env fsdir = env.getenv('FS_PREFIX') diff --git a/src/com/nettools.py b/src/utils/nettools.py similarity index 99% rename from src/com/nettools.py rename to src/utils/nettools.py index 98621b3..4f9e7f8 100755 --- a/src/com/nettools.py +++ b/src/utils/nettools.py @@ -1,8 +1,8 @@ #!/usr/bin/python3 import subprocess, threading -from com.log import logger -from com import env +from utils.log import logger +from utils import env class ipcontrol(object): @staticmethod diff --git a/src/com/proxytool.py b/src/utils/proxytool.py similarity index 97% rename from src/com/proxytool.py rename to src/utils/proxytool.py index 69b071c..c3c2032 100755 --- a/src/com/proxytool.py +++ b/src/utils/proxytool.py @@ -1,7 +1,7 @@ #!/usr/bin/python3 import requests, json -from com import env +from utils import env proxy_api_port = env.getenv("PROXY_API_PORT") proxy_control="http://localhost:"+ str(proxy_api_port) +"/api/routes" diff --git a/src/com/tools.py b/src/utils/tools.py similarity index 100% rename from src/com/tools.py rename to src/utils/tools.py diff --git a/src/com/updatebase.py b/src/utils/updatebase.py similarity index 98% rename from src/com/updatebase.py rename to src/utils/updatebase.py index 7c54e4e..692db59 100755 --- a/src/com/updatebase.py +++ b/src/utils/updatebase.py @@ -1,7 +1,7 @@ #!/usr/bin/python3 import os, shutil -from com.log import logger +from utils.log import logger def aufs_remove(basefs): try: @@ -13,7 +13,7 @@ def aufs_remove(basefs): logger.error(e) def aufs_clean(basefs): - # clean the aufs mark + # clean the aufs mark allfiles = os.listdir(basefs) for onefile in allfiles: if onefile[:4] == ".wh.": diff --git a/src/worker/container.py b/src/worker/container.py index 203e150..7f2d977 
100755 --- a/src/worker/container.py +++ b/src/worker/container.py @@ -1,8 +1,8 @@ #!/usr/bin/python3 import subprocess, os, json -from com.log import logger -from com import env, imagemgr +from utils.log import logger +from utils import env, imagemgr from worker.lvmtool import sys_run, check_volume from worker.monitor import Container_Collector, History_Manager import lxc diff --git a/src/worker/lvmtool.py b/src/worker/lvmtool.py index 7a83933..ce4626c 100755 --- a/src/worker/lvmtool.py +++ b/src/worker/lvmtool.py @@ -1,8 +1,8 @@ #!/usr/bin/python3 import subprocess,os,time -from com.log import logger -from com import env +from utils.log import logger +from utils import env def sys_run(command,check=False): Ret = subprocess.run(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, shell=True, check=check) diff --git a/src/worker/monitor.py b/src/worker/monitor.py index c0793b1..0291b1a 100755 --- a/src/worker/monitor.py +++ b/src/worker/monitor.py @@ -19,13 +19,13 @@ Design:Monitor mainly consists of three parts: Collectors, Master_Collector and import subprocess,re,os,psutil,math,sys import time,threading,json,traceback,platform -from com import env, etcdlib +from utils import env, etcdlib import lxc import xmlrpc.client from datetime import datetime -from com.model import db,VNode,History,BillingHistory,VCluster,PortMapping -from com.log import logger +from utils.model import db,VNode,History,BillingHistory,VCluster,PortMapping +from utils.log import logger from httplib2 import Http from urllib.parse import urlencode diff --git a/src/worker/worker.py b/src/worker/worker.py index a6305b3..7121090 100755 --- a/src/worker/worker.py +++ b/src/worker/worker.py @@ -4,22 +4,22 @@ import sys if sys.path[0].endswith("worker"): sys.path[0] = sys.path[0][:-6] -from com import env, tools +from utils import env, tools config = env.getenv("CONFIG") #config = "/opt/docklet/local/docklet-running.conf" tools.loadenv(config) # must import logger after initlogging, ugly 
-from com.log import initlogging +from utils.log import initlogging initlogging("docklet-worker") -from com.log import logger +from utils.log import logger import xmlrpc.server, sys, time from socketserver import ThreadingMixIn import threading -from com import etcdlib, proxytool +from utils import etcdlib, proxytool from worker import container, monitor -from com.nettools import netcontrol,ovscontrol,portcontrol +from utils.nettools import netcontrol,ovscontrol,portcontrol from worker.lvmtool import new_group, recover_group from master import network @@ -179,7 +179,7 @@ class Worker(object): netcontrol.new_bridge('docklet-br') else: if not netcontrol.bridge_exists('docklet-br'): - logger.error("docklet-br not found") + logger.error("docklet-br not found") sys.exit(1) logger.info ("setup GRE tunnel to master %s" % self.master) #network.netsetup("gre", self.master)