bugfix

parent b4112adde7
commit fb8153cc3a

README.md
@@ -23,7 +23,7 @@ SQLOR is a database API for python3; it is based on Python's DBAPI2
 * clickhouse (clickhouse-connect)
 * Other drivers can be easily integrated
 
-## Support Database Types
+## Supported Database Types
 * oracle
 * mysql, mariadb
 * TiDB
@@ -83,50 +83,51 @@ passwords in the json data are encrypted by AES.
 
 ## Using
 
+### sqlor setup
+First, specify a server_path folder. Under the server_path folder there must be a subfolder named "conf",
+and a config.json file must be inside that conf folder.
+
+The config.json needs "password_key" and "databases" attributes, like:
 ```
-import asyncio
-
-from sqlor.dbpools import DBPools, sqlorContext
-
-dbs={
+{
+	.......
+	"password_key":"tfyugihjo245g7g642yubv24g534",
+	"databases":{
+		.......
 		"mydb":{
 			"driver":"mysql",
 			"kwargs":{
 				"user":"test",
-				"db":"cfae",
-				"password":"test123",
-				"host":"localhost"
-			}
-		},
-		"stock":{
-			"driver":"aiopg",
-			"async_mode":True,
-			"codeing":"utf-8",
-			"dbname":"stock",
-			"kwargs":{
-				"dbname":"stock",
-				"user":"test",
-				"password":"test123",
-				"host":"127.0.0.1"
-			}
-		},
-		"cfae":{
-			"driver":"mysql.connector",
-			"coding":"utf8",
-			"dbname":"cfae",
-			"kwargs":{
-				"user":"test",
-				"db":"cfae",
-				"password":"test123",
+				"db":"database_name_in_your_database_engine",
+				"password":"encoded_password_string",
 				"host":"localhost"
 			}
 		}
 	}
+}
+```
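For orientation, here is a minimal sketch, not part of sqlor, of the folder layout the setup text above describes: a server_path folder containing conf/config.json with "password_key" and "databases" keys. The helper name and error handling are illustrative only.

```python
import json
import os

def load_sqlor_config(server_path: str) -> dict:
    # Expected layout: <server_path>/conf/config.json
    cfg_path = os.path.join(server_path, 'conf', 'config.json')
    with open(cfg_path, encoding='utf-8') as f:
        cfg = json.load(f)
    # config.json must carry both attributes described above
    for key in ('password_key', 'databases'):
        if key not in cfg:
            raise KeyError(f'config.json is missing "{key}"')
    return cfg
```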
+### generating the encoded password string
+sqlor ships a dbpassword script that lets you generate the "encoded_password_string" used in config.json.
+dbpassword usage:
+```
+dbpassword server_path database_password_of_user
+```
+
+### script to use sqlor
-loop = asyncio.get_event_loop()
-pool = DBPools(dbs,loop=loop)
+```
+import asyncio
+from appPublic.worker import get_event_loop
+from appPublic.jsonConfig import getConfig
+from sqlor.dbpools import DBPools, sqlorContext
+
+loop = get_event_loop()
+config = getConfig(server_path)
+pool = DBPools(config.databases, loop=loop)
+
 async def testfunc():
+	dbname = 'mydb'
+	db = DBPools()
 	async with sqlorContext('stock') as sor:
 		# start a transaction
 		# if an exception happens, all changes to the database will roll back
@@ -164,6 +165,38 @@ async def testfunc():
 loop.run_until_complete(testfunc())
 ```
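For a fuller picture of record-level operations than the truncated example above shows, here is a hedged sketch that reuses only calls appearing elsewhere in this commit (DBPools, db.sqlorContext, sor.D to delete by id, sor.C to create a record). The helper name, table name, and record fields are illustrative, not part of sqlor's documented API.

```python
import asyncio

from appPublic.worker import get_event_loop
from appPublic.jsonConfig import getConfig
from sqlor.dbpools import DBPools

async def replace_record(dbname, tblname, rec):
    db = DBPools()                                # reuses the pool configured below
    async with db.sqlorContext(dbname) as sor:    # one transaction; rolls back on exception
        try:
            # drop an existing row with the same id, if any (as the dbloader script does)
            await sor.D(tblname, {'id': rec['id']})
        except Exception:
            pass
        await sor.C(tblname, rec)                 # insert the record

config = getConfig('server_path')                 # 'server_path' is a placeholder folder
DBPools(config.databases)
get_event_loop().run_until_complete(
    replace_record('mydb', 'mytable', {'id': 1, 'name': 'demo'})
)
```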
 
+## scripts
+### dbpassword
+generates an encoded password for the database user's password
+
+* Syntax
+
+dbpassword server_path password_text
+
+1. server_path is the folder whose server_path/conf/config.json specifies password_key and databases
+
+2. password_text is the password of the specified database user
+
+* description
+
+dbpassword encrypts the password given as its second argument into a base64-encoded ciphertext and prints it to stdout; this ciphertext is what needs to be written into the "password" attribute under "kwargs".
+
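As a rough illustration of what "encrypted by AES and base64-encoded" can look like, the sketch below is not sqlor's actual dbpassword implementation; the key derivation, cipher mode, and padding are assumptions. It only shows the general shape of turning password_key plus a plain password into a base64 string for the "password" field.

```python
import base64
import hashlib

from Crypto.Cipher import AES            # pycryptodome
from Crypto.Util.Padding import pad

def encode_password(password_key: str, plain_password: str) -> str:
    # Derive a fixed-size key from password_key (sha256 is an assumption)
    key = hashlib.sha256(password_key.encode('utf-8')).digest()
    # Cipher mode is an assumption; dbpassword may handle the mode/IV differently
    cipher = AES.new(key, AES.MODE_ECB)
    ciphertext = cipher.encrypt(pad(plain_password.encode('utf-8'), AES.block_size))
    # The base64 string is what would go into "password" under "kwargs"
    return base64.b64encode(ciphertext).decode('ascii')

if __name__ == '__main__':
    print(encode_password('tfyugihjo245g7g642yubv24g534', 'test123'))
```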
+### dbloader
+loads data from an xlsx file into your database
+
+* Syntax
+
+dbloader server_path dbname xlsxfile
+
+1. server_path is the folder whose server_path/conf/config.json specifies password_key and databases
+
+2. dbname is the database the data will be inserted into
+
+3. xlsxfile is a data file; it can contain many sheets, each sheet name is a table name, the first row of a sheet contains the field names of the table, and fields without data do not need to appear in the sheet (a sketch of such a workbook follows this list).
+
+* dbloader reads the data from each named sheet in the xlsxfile and inserts it into the database in parallel
+
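To make the expected workbook layout concrete, here is a short sketch that builds an xlsx file matching the description above: one sheet per table, the sheet name equal to the table name, and the first row holding the field names. The table names and columns are made up for illustration, and openpyxl is assumed only as a convenient way to produce the file.

```python
from openpyxl import Workbook   # assumes openpyxl is available

wb = Workbook()

# First sheet: table "users"; row 1 carries the field names
ws = wb.active
ws.title = 'users'
ws.append(['id', 'name', 'email'])
ws.append([1, 'alice', 'alice@example.com'])
ws.append([2, 'bob', 'bob@example.com'])

# A second sheet becomes a second table, "orders"
ws2 = wb.create_sheet('orders')
ws2.append(['id', 'user_id', 'amount'])
ws2.append([1, 1, 99.5])

wb.save('data.xlsx')   # then: dbloader server_path dbname data.xlsx
```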
 ## API
 
 
@@ -175,25 +208,23 @@ how many databases and which databases will be used, and their connection parameters
 dbdesc data is a dict; the format of dbdesc is as follows:
 ```
 {
-	"aiocfae":{	# name to identify a database connection
-		"driver":"aiomysql",	# database dbapi2 driver package name
+	"mydb1":{	# name to identify a database connection
+		"driver":"mysql",	# database dbapi2 driver package name
 		"async_mode":True,	# indicates this connection is in asynchronous mode
 		"coding":"utf8",	# charset coding
 		"dbname":"cfae",	# real database name
 		"kwargs":{	# connection parameters
 			"user":"test",
 			"db":"cfae",
-			"password":"test123",
+			"password":"encoded_password",
 			"host":"localhost"
 		}
 	},
-	"cfae":{
-		"driver":"mysql.connector",
-		"coding":"utf8",
-		"dbname":"cfae",
+	"mydb2":{
+		"driver":"postgresql",
 		"kwargs":{
 			"user":"test",
-			"db":"cfae",
+			"dbname":"cfae",
 			"password":"test123",
 			"host":"localhost"
 		}
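To connect this format back to the setup code earlier in the diff, the sketch below builds such a dbdesc dict in Python and hands it to DBPools, mirroring the DBPools(dbs, loop=loop) and DBPools(config.databases, loop=loop) calls shown above; the single-entry dict and variable names are illustrative.

```python
from appPublic.worker import get_event_loop
from sqlor.dbpools import DBPools

dbdesc = {
    "mydb1": {                  # name used later to pick this connection
        "driver": "mysql",
        "kwargs": {
            "user": "test",
            "db": "cfae",
            "password": "encoded_password",
            "host": "localhost"
        }
    }
}

loop = get_event_loop()
pool = DBPools(dbdesc, loop=loop)
# The example scripts above later call DBPools() with no arguments
# and reuse this configured pool.
```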
setup.cfg

@@ -1,6 +1,6 @@
 [metadata]
 name=sqlor
-version = 2.0.1
+version = 2.0.2
 description = a new version of sqlor, each db's sor need to plugin to sqlor, and dbdriver now a isolated module
 authors = yu moqing
 author_email = yumoqing@gmail.com
dbloader script

@@ -1,7 +1,50 @@
-from appPublic.worker import get_event_loop
+import os
+import sys
+import asyncio
 
-async def main():
-	print('to be programming')
+from appPublic.worker import get_event_loop
+from appPublic.jsonConfig import getConfig
+from appPublic.worker import AsyncWorker
+from sqlor.dbpools import DBPools
+from appPublic.dictObject import DictObject
+from xls2ddl.xlsxData import XLSXData
+
+def chunked(lst, n):
+	# yield successive n-sized chunks of lst
+	for i in range(0, len(lst), n):
+		yield lst[i:i+n]
+
+async def load_tabledata(dbname, tblname, data):
+	db = DBPools()
+	async with db.sqlorContext(dbname) as sor:
+		for r in data:
+			try:
+				# delete any existing row with the same id, then insert the record
+				await sor.D(tblname, {'id': r.id})
+			except:
+				pass
+			await sor.C(tblname, r.copy())
+
+async def load_data():
+	if len(sys.argv) < 4:
+		print(f'{sys.argv[0]} server_path dbname datafile')
+		return 1
+	runpath = sys.argv[1]
+	dbname = sys.argv[2]
+	datafile = sys.argv[3]
+	config = getConfig(runpath)
+	db = DBPools(config.databases)
+	xlsx = XLSXData(datafile)
+	worker = AsyncWorker(maxtask=100)
+	tasks = []
+	for i, s in enumerate(xlsx.book.worksheets):
+		# each sheet maps to a table; the sheet name is the table name
+		tblname = xlsx.book.sheetnames[i]
+		dic = xlsx.readRecords(tblname, s)
+		for chunk in chunked(dic[tblname], 100):
+			t = asyncio.create_task(load_tabledata(dbname, tblname, chunk))
+			tasks.append(t)
+	await asyncio.wait(tasks)
+	return 0
+
+def main():
+	get_event_loop().run_until_complete(load_data())
 
 if __name__ == '__main__':
-	get_event_loop().run_until_complete(main())
+	main()
|
|||||||
Loading…
x
Reference in New Issue
Block a user