First commit

DiG 2022-01-14 14:27:23 +01:00
commit 859fe7ae36
7 changed files with 493 additions and 0 deletions

142
.gitignore vendored Normal file

@@ -0,0 +1,142 @@
# Project
datas
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/

7
.modelignore Normal file

@@ -0,0 +1,7 @@
ir.ui.view
ir.model.access
ir.module.module
ir.module.category
res.users
res.groups
res.company

3
.relationrc Normal file

@@ -0,0 +1,3 @@
#<model> <relation models>
product.uom

3
README.md Normal file

@@ -0,0 +1,3 @@
# odoo-export
Exports Odoo models and data to flat files (JSON or TSV).
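
A rough usage sketch, based on the commands implemented in odoo-rpc.py and the helper scripts in this commit (model names, record IDs and credentials are only examples):

    ./odoo-rpc.py login <server> <port> <db> <user> <password>         # saves an odoorpc session
    ./odoo-rpc.py db list                                               # list available databases
    ./odoo-rpc.py product.uom browse 20                                 # dump record 20 as JSON
    ./odoo-rpc.py ir.model.fields search model_id.model = product.uom   # TSV field listing
    ./odoo-rpc.py product.uom export-json id = 20                       # recursive export to datas/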

8
ir.field.sh Normal file

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Collect the ids of the ir.model.fields records belonging to the model given as $1.
model="ir.model.fields"
folder="datas/$model"
mkdir -p "$folder"
#./odoo-rpc.py "$model" browse 20 > "$folder/20.json"
ids="$(./odoo-rpc.py "$model" search model_id.model = "$1")"
# NOTE: "$ids" is collected but not used further yet.

324
odoo-rpc.py Normal file

@@ -0,0 +1,324 @@
#!/usr/bin/env python
# Usage:
#   ./odoo-rpc.py <MODEL> <METHOD> [<param> [<param> [<param>]]]
from __future__ import print_function

import os
import sys
import json

import odoorpc  # pip install odoorpc

try:                       # Python 2: raw_input() exists
    raw_input
except NameError:          # Python 3: it was renamed to input()
    raw_input = input


def eprint(*args, **kwargs):
    """Print to stderr, so stdout stays clean for TSV/JSON output."""
    print(*args, file=sys.stderr, **kwargs)


# CONFIG
FOLDER = '/home/dig/odoo-export/datas'
PARAMS = sys.argv[1:]

# Models listed in .modelignore are never exported.
MODEL_IGNORE = []
with open(".modelignore", "r") as f:
    for line in f:
        MODEL_IGNORE.append(line.strip())

eprint(len(sys.argv), 'argument(s):', str(sys.argv))
eprint(MODEL_IGNORE)

# TABLE VIEW COLUMNS
cols = {
    '': ['id', 'name'],  # default
    'ir.model': ['id', 'model'],
    'ir.model.fields': ['id', 'name', 'ttype', 'create_uid', 'relation', 'display_name', 'complete_name'],
}


def jlog(obj):
    """Pretty-print an object as indented JSON on stdout."""
    print(json.dumps(obj, indent=4))  # or indent="\t"


def ensure_dir(d):
    if not os.path.exists(d):
        os.makedirs(d)


def args2domains(args):
    """Turn positional CLI arguments into an Odoo search domain:
    '&', '|' and '!' are kept as logical operators, everything else is
    consumed three by three as (field, operator, value) triples."""
    domains = []
    while len(args) > 0:
        arg = args[0]
        if arg == '&' or arg == '|' or arg == '!':
            domains.append(arg)
            args = args[1:]
        else:
            domains.append((args[0], args[1], args[2]))
            args = args[3:]
    return domains


eprint(args2domains(sys.argv[3:]))
#quit()

odoo = None


def autolog():
    # Reuse the session saved by login(), via the odoorpc.ODOO.load() class method:
    odoo = odoorpc.ODOO.load('session')
    # Or, if you have saved your configuration in another file:
    #   odoo = odoorpc.ODOO.load('tutorial', '~/my_own_odoorpcrc')
    # You can check available sessions with odoorpc.ODOO.list(), and remove them
    # with odoorpc.ODOO.remove():
    #   odoorpc.ODOO.list()                    # ['session']
    #   odoorpc.ODOO.remove('tutorial')
    #   'tutorial' not in odoorpc.ODOO.list()  # True
    # Current user
    user = odoo.env.user
    eprint('User:    ', user.name)             # name of the connected user
    eprint('Company: ', user.company_id.name)  # name of its company
    eprint('_________________________')
    return odoo


# Prepare the connection to the server
#   odoo = odoorpc.ODOO('localhost', port=8069)
# Check available databases
#   eprint('Databases: ', odoo.db.list())
#   jlog(odoo.db.list())
# Simple 'raw' query
#   user_data = odoo.execute('res.users', 'read', [user.id])
#   print(user_data)
# Use all methods of a model
#   if 'sale.order' in odoo.env:
#       Order = odoo.env['sale.order']
#       order_ids = Order.search([])
#       for order in Order.browse(order_ids):
#           print(order.name)
#           products = [line.product_id.name for line in order.order_line]
#           print(products)
# Update data through a record
#   user.name = "Brian Jones"


def get_schema(name):
    eprint('get_schema')
    Model = odoo.env['ir.model']
    ids = Model.search([('model', '=', name)])
    #print(ids)
    for model in Model.browse(ids):
        obj = model.read()[0]
        fields = [field.read()[0] for field in model.field_id]
        #jlog(fields)
        obj['field_id'] = fields
        jlog(transform_model(obj))
        eprint("%s/schemas/%s.json" % (FOLDER, model.model))


def transform_model(obj):
    return {
        "name": obj['model'],
        "modules": obj['modules'],
        "fields": [{} for field in obj['field_id']],
    }


def rel_fields(model):
    """Return the relational fields (those with a target model) of `model`."""
    Model = odoo.env['ir.model.fields']
    ids = Model.search([('model_id.model', '=', model), ('relation', '!=', 'false')])
    return Model.browse(ids)


def tsv(*fields):
    print(*fields, sep='\t')


def render_tsv(model, records):
    """Print `records` as a TSV table, using the columns configured for `model`."""
    if model in cols:
        _cols = cols[model]
    else:
        _cols = cols['']
    tsv(*_cols)
    for obj in records:
        tsv(*[obj[col] for col in _cols])


#def render_json(model, obj):


# COMMANDS
def login(server=None, port=None, db=None, user=None, passwd=None):
    """Log in (interactively or from arguments) and save the odoorpc session."""
    if server:
        tsv('Server:', server)
    else:
        server = raw_input('Server address: ')
    if port:
        tsv('Port:', port)
    else:
        port = raw_input('Port: ')
    odoo = odoorpc.ODOO(server, port=port)
    print('Connected to ' + server + ':' + port)
    print('Available databases:')
    for _db in odoo.db.list():
        print(_db)
    if db:
        tsv('Database:', db)
    else:
        db = raw_input('Choose database: ')
    if user:
        tsv('User:', user)
    else:
        user = raw_input('User: ')
    if passwd:
        tsv('Password:', '****************')
    else:
        passwd = raw_input('Password: ')
    eprint(server, port, user, passwd)
    user = odoo.login(db, user, passwd)
    odoo.save('session')
    # By default, this information is stored in the ~/.odoorpcrc file.
    # You can however use another file:
    #   odoo.save('tutorial', '~/my_own_odoorpcrc')


def search(model, domains):
    Model = odoo.env[model]
    ids = Model.search(domains)
    render_tsv(model, Model.browse(ids))
    # print('id', 'name', sep='\t')
    # for inst in Model.browse(ids):
    #     render_tsv(model, inst)


def fields(model, domains):
    # domains is currently unused; all fields of `model` are listed.
    Model = odoo.env['ir.model.fields']
    ids = Model.search([('model_id.model', '=', model)])
    render_tsv(model, Model.browse(ids))


current_exports = []


def export_json(model, domains):
    """Export the matching records of `model` to JSON files, then recurse
    through their many2one/one2many relations (skipping ignored models)."""
    print("Export to json: %s %s" % (model, domains))
    if model in MODEL_IGNORE:
        print('IGNORED')
        return
    Model = odoo.env[model]
    ids = Model.search(domains)
    ensure_dir("%s/%s" % (FOLDER, model))
    rfields = rel_fields(model)
    eprint(rfields)
    for inst in Model.browse(ids):
        data = inst.read()[0]
        filename = "%s/%s/%s.json" % (FOLDER, model, inst.id)
        if filename in current_exports:
            continue
        print('Preparing data for %s' % (filename))
        current_exports.append(filename)
        # Write pretty-printed JSON data to file
        with open(filename, "w") as write_file:
            json.dump(data, write_file, indent=4)
        print("%s written" % (filename))
        for field in rfields:
            #data[field.name] = inst[field.name].read()[0]
            #eprint(data[field.name])
            if field.relation in MODEL_IGNORE:
                continue
            if data[field.name]:
                tsv('Field: ', field.ttype, field.name, field.relation, data[field.name])
                if field.ttype == 'many2one':
                    id = data[field.name][0]
                    _filename = "%s/%s/%s.json" % (FOLDER, field.relation, id)
                    print(field.ttype, field.relation, id, _filename)
                    if not os.path.exists(_filename):
                        export_json(field.relation, [('id', '=', id)])
                    else:
                        print('Already exists %s' % (_filename))
                if field.ttype == 'one2many':
                    for id in data[field.name]:
                        _filename = "%s/%s/%s.json" % (FOLDER, field.relation, id)
                        print(field.ttype, field.relation, id, _filename)
                        if not os.path.exists(_filename):
                            export_json(field.relation, [('id', '=', id)])
                        else:
                            print('Already exists %s' % (_filename))


# AUTO EXEC
MODEL = PARAMS[0]
METHOD = PARAMS[1] if len(PARAMS) > 1 else None

if PARAMS[0] == 'login':
    login(*PARAMS[1:])
    quit()
else:
    odoo = autolog()
    if MODEL in odoo.env:
        Model = odoo.env[MODEL]
        if METHOD == 'search':
            search(MODEL, args2domains(sys.argv[3:]))
            #ids = Model.search(args2domains(sys.argv[3:]))
            #print('id', 'name', sep='\t')
            #for inst in Model.browse(ids):
            #    render_tsv(MODEL, inst)
        if METHOD == 'browse':
            ids = [int(s) for s in sys.argv[3:]]
            #print(ids)
            for inst in Model.browse(ids):
                jlog(inst.read()[0])
        if METHOD == 'export-json':
            export_json(MODEL, args2domains(sys.argv[3:]))
        if METHOD == 'fields_get':
            jlog(Model.fields_get())
        if METHOD == 'fields':
            Model = odoo.env['ir.model.fields']
            ids = Model.search([('model_id.model', '=', MODEL)])
            render_tsv(MODEL, Model.browse(ids))
            #for inst in Field.browse(ids):
            #    render_tsv('ir.model.fields', inst)
    elif MODEL == 'db':
        if METHOD == 'list':
            jlog(odoo.db.list())
    elif MODEL == 'schema':
        get_schema(METHOD)

6
product.uom.sh Normal file

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Dump product.uom record 20 as pretty-printed JSON into datas/product.uom/20.json.
folder="datas/product.uom"
mkdir -p "$folder"
./odoo-rpc.py product.uom browse 20 > "$folder/20.json"