Commit 835ef2fa authored by Stefane Fermigier

Initial import.

.idea
.tox
.dox
.travis-solo
.DS_Store
.coverage
.vagrant
.cache
.coveralls.yml
.ropeproject
*,cover
sandbox
node_modules
env
whoosh*
abilian.db
tmp
cache
dist
tests/cache
tests/tmp
tests/extra_files
maxid.data
results
build
MANIFEST
docs/_build
htmlcov
junit-py27.xml
abilian/static/misc*
abilian/static/.webassets-cache
abilian/static/gen
abilian/static/placeholders
*.pyc
*.egg
*.egg-info
*.swp
*.prof
*.stats
*.mo
.cov2emacs.log
/.eggs/
[TYPECHECK]
# List of classes / modules names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLAlchemy
ignored-modules=lxml.etree
[REPORTS]
output-format=colorized
# Include message's id in output
msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
# Tells whether to display a full report or only the messages
reports=no
include *.rst *.txt *.ini *.py *.cfg *.yml
recursive-include olapy *
recursive-include cubes *
prune tmp
prune instance
prune .git
.PHONY: test unit full-test clean setup stage deploy
SRC=olapy
PKG=$(SRC)
default: test
#
# testing
#
test:
pytest . --durations=10
test-with-coverage:
pytest --tb=short --cov $(PKG) --cov-report term-missing .
#
# setup
#
develop:
@echo "--> Installing / updating python dependencies for development"
pip install -q pip-tools
pip-sync requirements.txt
pip install -q -r requirements.txt
pip install -e .
@echo ""
#
# Linting
#
lint: lint-python
lint-python:
@echo "--> Linting Python files"
-flake8 $(SRC) tests
@echo "Checking Py3k (basic) compatibility"
-pylint --rcfile .pylint.rc --py3k *.py $(SRC) tests
@echo "Running pylint, some errors reported might be false positives"
-pylint -E --rcfile .pylint.rc $(SRC)
clean:
find . -name "*.pyc" -delete
find . -name yaka.db -delete
find . -name .DS_Store -delete
find . -name cache -type d -delete
find . -type d -empty -delete
rm -f migration.log
rm -rf build dist
rm -rf tests/data tests/integration/data
rm -rf tmp tests/tmp tests/integration/tmp
rm -rf cache tests/cache tests/integration/cache
rm -rf *.egg .coverage
rm -rf doc/_build
rm -rf static/gen static/.webassets-cache instance/webassets
rm -rf htmlcov junit-*.xml
tidy: clean
rm -rf .tox
format:
isort -rc $(SRC) tests *.py
yapf --style google -r -i $(SRC) tests *.py
isort -rc $(SRC) tests *.py
update-deps:
pip-compile -U > /dev/null
pip-compile > /dev/null
git --no-pager diff requirements.txt
OlaPy, an experimental OLAP engine based on Pandas
==================================================
About
-----
**OlaPy** is an OLAP_ engine with MDX_ support, a web interface (slice & dice) and XMLA_ support for Excel clients.
.. _OLAP: https://en.wikipedia.org/wiki/Online_analytical_processing
.. _MDX: https://en.wikipedia.org/wiki/MultiDimensional_eXpressions
.. _XMLA: https://en.wikipedia.org/wiki/XML_for_Analysis
Status
~~~~~~
This project is currently a research prototype, not suited for production use.
Licence
~~~~~~~
This project is currently licenced under the LGPL v3 licence.
Installation
------------
To set up the application, run, ideally in a virtualenv::
python setup.py install
or just::
pip install -e .
Usage
-----
With an XMLA client
~~~~~~~~~~~~~~~~~~~
To use XMLA from Excel, start the XMLA server by running, from the root directory::
python olapy
Add ``-c`` or ``--console`` if you want to print logs only to the console, then use the URL http://127.0.0.1:8000/xmla in Excel.
* Be sure to use ``Provider=MSOLAP.6`` in Excel (see https://blogs.technet.microsoft.com/excel_services__powerpivot_for_sharepoint_support_blog/2015/10/30/configuring-the-msolap-data-provider-version).
* If you want to select several measures in Excel, select them before the dimensions, then drag and drop the 'Values' attribute from the COLUMNS field to the ROWS field.
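If you want to exercise the endpoint outside Excel, a minimal sketch using a third-party XMLA client might look like the following (the ``xmla`` package and the exact ``connect``/``Execute`` calls are assumptions, not part of OlaPy; the MDX query is taken from the micro benchmarks)::

    import olap.xmla.xmla as xmla

    # Connect to the OlaPy XMLA endpoint started above.
    provider = xmla.XMLAProvider()
    connection = provider.connect(location="http://127.0.0.1:8000/xmla")

    # Run a trivial MDX query against the demo sales cube.
    result = connection.Execute(
        "SELECT FROM [sales] WHERE ([Measures].[Amount])",
        Catalog="sales")
    print(result)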
With the demo web app
~~~~~~~~~~~~~~~~~~~~~
Run:
1. `python manage.py initdb` to initialize the db
2. `python manage.py runserver` to run the application, then log in with the following credentials:
- **login**: admin
- **password**: admin
Developing
----------
This project must adhere to the `Abilian Developer Guide <http://abilian-developer-guide.readthedocs.io/>`_.
Pull requests are welcome.
Tests
~~~~~
To run tests, run::
pytest tests
or simply (on Unix-like systems)::
make test
Credits
-------
This project is developed by `Abilian SAS <https://www.abilian.com>`_ and partially funded by the French Government through the `Investissement d'avenir <http://www.gouvernement.fr/investissements-d-avenir-cgi>`_ programme.
dependencies:
cache_directories:
- "~/.cache/"
override:
- pip install -U pip tox
# Just to fill up the cache (to speed up later builds)
- pip install -q -r requirements.txt
test:
override:
- tox
************************
* IMPORTANT *
************************
1) The fact table must be named 'Facts'
2) ID columns must end with _id (product_id, person_id, ...)
3) Each column name must be unique across all the tables
4) Columns must be ordered according to their hierarchy
5) Use a simple star model with one key and numeric measures (the other cases are not tested yet)
6) Make sure the data entered in the tables does not contain any trailing white space
7) If you want to impose an order on the tables, prefix the file names with the desired order, for example 1Geography.csv, 2Product.csv, ... [useful for Excel level display]; see the layout example below
8) The time dimension must be named 'Time'
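Example layout (illustrative only; the folder and file names are placeholders that follow the rules above):

    cubes/
        sales/
            Facts.csv        <- fact table, named 'Facts'
            1Geography.csv   <- dimension, prefixed to force its order
            2Product.csv     <- dimension
            Time.csv         <- time dimension, named 'Time'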
"store_id";"store_type";"store_name";"store_number";"store_street_address";"store_city";"store_state";"store_postal_code";"store_country";"store_manager";"store_phone";"store_fax";"first_opened_date";"last_remodel_date";"store_sqft";"grocery_sqft";"frozen_sqft";"meat_sqft";"coffee_bar";"video_store";"salad_bar";"prepared_food";"florist"
0;"HeadQuarters";"HQ";0;"1 Alameda Way";"Alameda";"CA";"55555";"USA";"";"";"";"";"";0;0;0;0;0;0;0;0;0
1;"Supermarket";"Store 1";1;"2853 Bailey Rd";"Acapulco";"Guerrero";"55555";"Mexico";"Jones";"262-555-5124";"262-555-5121";"1982-01-09 00:00:00";"1990-12-05 00:00:00";23593;17475;3671;2447;0;0;0;0;0
2;"Small Grocery";"Store 2";2;"5203 Catanzaro Way";"Bellingham";"WA";"55555";"USA";"Smith";"605-555-8203";"605-555-8201";"1970-04-02 00:00:00";"1973-06-04 00:00:00";28206;22271;3561;2374;1;0;0;0;0
3;"Supermarket";"Store 3";3;"1501 Ramsey Circle";"Bremerton";"WA";"55555";"USA";"Davis";"509-555-1596";"509-555-1591";"1959-06-14 00:00:00";"1967-11-19 00:00:00";39696;24390;9184;6122;0;0;1;1;0
4;"Gourmet Supermarket";"Store 4";4;"433 St George Dr";"Camacho";"Zacatecas";"55555";"Mexico";"Johnson";"304-555-1474";"304-555-1471";"1994-09-27 00:00:00";"1995-12-01 00:00:00";23759;16844;4149;2766;1;0;1;1;1
5;"Small Grocery";"Store 5";5;"1250 Coggins Drive";"Guadalajara";"Jalisco";"55555";"Mexico";"Green";"801-555-4324";"801-555-4321";"1978-09-18 00:00:00";"1991-06-29 00:00:00";24597;15012;5751;3834;1;0;0;0;0
6;"Gourmet Supermarket";"Store 6";6;"5495 Mitchell Canyon Road";"Beverly Hills";"CA";"55555";"USA";"Maris";"958-555-5002";"958-555-5001";"1981-01-03 00:00:00";"1991-03-13 00:00:00";23688;15337;5011;3340;1;1;1;1;1
7;"Supermarket";"Store 7";7;"1077 Wharf Drive";"Los Angeles";"CA";"55555";"USA";"White";"477-555-7967";"477-555-7961";"1971-05-21 00:00:00";"1981-10-20 00:00:00";23598;14210;5633;3755;0;0;0;0;1
8;"Deluxe Supermarket";"Store 8";8;"3173 Buena Vista Ave";"Merida";"Yucatan";"55555";"Mexico";"Williams";"797-555-3417";"797-555-3411";"1958-09-23 00:00:00";"1967-11-18 00:00:00";30797;20141;6393;4262;1;1;1;1;1
9;"Mid-Size Grocery";"Store 9";9;"1872 El Pintado Road";"Mexico City";"DF";"55555";"Mexico";"Stuber";"439-555-3524";"439-555-3521";"1955-03-18 00:00:00";"1959-06-07 00:00:00";36509;22450;8435;5624;0;0;0;0;0
10;"Supermarket";"Store 10";10;"7894 Rotherham Dr";"Orizaba";"Veracruz";"55555";"Mexico";"Merz";"212-555-4774";"212-555-4771";"1979-04-13 00:00:00";"1982-01-30 00:00:00";34791;26354;5062;3375;0;0;1;1;0
11;"Supermarket";"Store 11";11;"5371 Holland Circle";"Portland";"OR";"55555";"USA";"Erickson";"685-555-8995";"685-555-8991";"1976-09-17 00:00:00";"1982-05-15 00:00:00";20319;16232;2452;1635;0;0;0;0;0
12;"Deluxe Supermarket";"Store 12";12;"1120 Westchester Pl";"Hidalgo";"Zacatecas";"55555";"Mexico";"Kalman";"151-555-1702";"151-555-1701";"1968-03-25 00:00:00";"1993-12-18 00:00:00";30584;21938;5188;3458;1;1;1;1;1
13;"Deluxe Supermarket";"Store 13";13;"5179 Valley Ave";"Salem";"OR";"55555";"USA";"Inmon";"977-555-2724";"977-555-2721";"1957-04-13 00:00:00";"1997-11-10 00:00:00";27694;18670;5415;3610;1;1;1;1;1
14;"Small Grocery";"Store 14";14;"4365 Indigo Ct";"San Francisco";"CA";"55555";"USA";"Strehlo";"135-555-4888";"135-555-4881";"1957-11-24 00:00:00";"1958-01-07 00:00:00";22478;15321;4294;2863;1;0;0;0;0
15;"Supermarket";"Store 15";15;"5006 Highland Drive";"Seattle";"WA";"55555";"USA";"Ollom";"893-555-1024";"893-555-1021";"1969-07-24 00:00:00";"1973-10-19 00:00:00";21215;13305;4746;3164;1;0;0;0;0
16;"Supermarket";"Store 16";16;"5922 La Salle Ct";"Spokane";"WA";"55555";"USA";"Mantle";"643-555-3645";"643-555-3641";"1974-08-23 00:00:00";"1977-07-13 00:00:00";30268;22063;4923;3282;0;0;0;0;0
17;"Deluxe Supermarket";"Store 17";17;"490 Risdon Road";"Tacoma";"WA";"55555";"USA";"Mays";"855-555-5581";"855-555-5581";"1970-05-30 00:00:00";"1976-06-23 00:00:00";33858;22123;7041;4694;1;0;1;1;1
18;"Mid-Size Grocery";"Store 18";18;"6764 Glen Road";"Hidalgo";"Zacatecas";"55555";"Mexico";"Brown";"528-555-8317";"528-555-8311";"1969-06-28 00:00:00";"1975-08-30 00:00:00";38382;30351;4819;3213;0;0;0;0;0
19;"Deluxe Supermarket";"Store 19";19;"6644 Sudance Drive";"Vancouver";"BC";"55555";"Canada";"Ruth";"862-555-7395";"862-555-7391";"1977-03-27 00:00:00";"1990-10-25 00:00:00";23112;16418;4016;2678;1;1;1;1;1
20;"Mid-Size Grocery";"Store 20";20;"3706 Marvelle Ln";"Victoria";"BC";"55555";"Canada";"Cobb";"897-555-1931";"897-555-1931";"1980-02-06 00:00:00";"1987-04-09 00:00:00";34452;27463;4193;2795;1;0;0;0;1
21;"Deluxe Supermarket";"Store 21";21;"4093 Steven Circle";"San Andres";"DF";"55555";"Mexico";"Jones";"493-555-4781";"493-555-4781";"1986-02-07 00:00:00";"1990-04-16 00:00:00";0;0;0;0;1;0;1;1;1
22;"Small Grocery";"Store 22";22;"9606 Julpum Loop";"Walla Walla";"WA";"55555";"USA";"Byrg";"881-555-5117";"881-555-5111";"1951-01-24 00:00:00";"1969-10-17 00:00:00";0;0;0;0;0;0;0;0;0
23;"Mid-Size Grocery";"Store 23";23;"3920 Noah Court";"Yakima";"WA";"55555";"USA";"Johnson";"170-555-8424";"170-555-8421";"1977-07-16 00:00:00";"1987-07-24 00:00:00";0;0;0;0;0;0;0;0;0
24;"Supermarket";"Store 24";24;"2342 Waltham St.";"San Diego";"CA";"55555";"USA";"Byrd";"111-555-0303";"111-555-0304";"1979-05-22 00:00:00";"1986-04-20 00:00:00";0;0;0;0;1;0;1;0;1
"warehouse_id";"warehouse_name";"wa_address1";"wa_address2";"wa_address3";"wa_address4";"warehouse_city";"warehouse_state_province";"warehouse_postal_code";"warehouse_country";"warehouse_owner_name";"warehouse_phone";"warehouse_fax"
1;"Salka Warehousing";"9716 Alovera Road";"";"";"";"Acapulco";"Guerrero";"55555";"Mexico";"";"821-555-1658";"594-555-2908"
2;"Foster Products";"958 Hilltop Dr";"";"";"";"Bellingham";"WA";"55555";"USA";"";"315-555-8947";"119-555-3826"
3;"Destination, Inc.";"4162 Euclid Ave";"";"";"";"Bremerton";"WA";"55555";"USA";"";"517-555-3022";"136-555-4501"
4;"Anderson Warehousing";"5657 Georgia Dr";"";"";"";"Camacho";"Zacatecas";"55555";"Mexico";"";"681-555-1655";"946-555-4848"
5;"Focus, Inc.";"9116 Tice Valley Blv.";"";"";"";"Guadalajara";"Jalisco";"55555";"Mexico";"";"344-555-5530";"379-555-9065"
6;"Big Quality Warehouse";"3521 Fourth Stret";"";"";"";"Beverly Hills";"CA";"55555";"USA";"";"892-555-3590";"388-555-7926"
7;"Artesia Warehousing, Inc.";"9889 Matterhorn Court";"";"";"";"Los Angeles";"CA";"55555";"USA";"";"859-555-2792";"740-555-6556"
8;"Bastani and Sons";"1893 Northridge Drive";"";"";"";"Marida";"Yucatan";"55555";"Mexico";"";"846-555-3024";"804-555-6674"
9;"Freeman And Co";"234 West Covina Pkwy";"";"";"";"Mexico City";"DF";"55555";"Mexico";"";"";""
10;"Jamison, Inc.";"1172 Liana Lane";"";"";"";"Orizaba";"Veracruz";"55555";"Mexico";"";"379-555-5756";"436-555-7920"
11;"Quality Distribution, Inc.";"6085 Darlene Drive";"";"";"";"Portland";"OR";"55555";"USA";"";"181-555-3588";"269-555-8381"
12;"Arnold and Sons";"5617 Saclan Terrace";"";"";"";"Hidalgo";"Zacatecas";"55555";"Mexico";"";"360-555-8035";"263-555-1427"
13;"Treehouse Distribution";"5473 Olive Hill";"";"";"";"Salem";"OR";"55555";"USA";"";"831-555-6210";"481-555-1317"
14;"Food Service Storage, Inc.";"5259 Mildred Ln";"";"";"";"San Francisco";"CA";"55555";"USA";"";"471-555-2456";"552-555-3249"
15;"Quality Warehousing and Trucking";"3337 Northpoint Ct";"";"";"";"Seattle";"WA";"55555";"USA";"";"427-555-9060";"869-555-1652"
16;"Jones International";"3377 Coachman Place";"";"";"";"Spokane";"WA";"55555";"USA";"";"144-555-5192";"971-555-6213"
17;"Jorge Garcia, Inc.";"4364 Viera Avenue";"";"";"";"Tacoma";"WA";"55555";"USA";"";"200-555-1310";"442-555-5874"
18;"Worthington Food Products";"4659 Cape Cod Way";"";"";"";"Hidalgo";"Zacatecas";"55555";"Mexico";"";"609-555-5413";"952-555-8492"
19;"Bellmont Distributing";"5900 May Rd";"";"";"";"Vancouver";"BC";"55555";"Canada";"";"226-555-5335";"361-555-7385"
20;"Rose Food Warehousing";"9104 Jacobsen Street";"";"";"";"Victoria";"BC";"55555";"Canada";"";"143-555-7496";"332-555-1803"
21;"Derby and Hunt";"600 Lake Nadine Place";"";"";"";"San Andres";"DF";"55555";"Mexico";"";"922-555-5214";"618-555-6578"
22;"Valdez Warehousing";"6714 Roundtree Court";"";"";"";"Walla Walla";"WA";"55555";"USA";"";"301-555-8174";"666-555-9881"
23;"Maddock Stored Foods";"8463 Kim Court";"";"";"";"Yakima";"WA";"55555";"USA";"";"803-555-8978";"630-555-2485"
24;"Jorgensen Service Storage";"4832 Park Glen Ct";"";"";"";"San Diego";"CA";"55555";"USA";"";"259-555-2824";"470-555-4760"
Day;City;Licence;Amount;Count
May 12,2010;Madrid;Personal;1;84
May 13,2010;Barcelona;Personal;2;841
May 14,2010;Paris;Personal;4;2
May 15,2010;Lausanne;Personal;8;231
May 16,2010;Lausanne;Corporate;16;4
May 17,2010;Lausanne;Partnership;32;65
May 18,2010;Zurich;Partnership;64;64
May 19,2010;Geneva;Corporate;128;13
May 20,2010;New York;Corporate;256;12
May 21,2010;New York;Corporate;512;564
Continent;Country;City
America;Canada;Quebec
America;Canada;Toronto
America;United States;Los Angeles
America;United States;New York
America;United States;San Francisco
America;Mexico;Mexico
America;Venezuela;Caracas
Europe;France;Paris
Europe;Spain;Barcelona
Europe;Spain;Madrid
Europe;Spain;Valencia
Europe;Switzerland;Geneva
Europe;Switzerland;Lausanne
Europe;Switzerland;Zurich
Company;Article;Licence
Crazy Development;olapy;Corporate
Crazy Development;olapy;Partnership
Crazy Development;olapy;Personal
Crazy Development;olapy;Startup
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " epub3 to make an epub3"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " dummy to check syntax errors of document sources"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: html
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/olapy.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/olapy.qhc"
.PHONY: applehelp
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
.PHONY: devhelp
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/olapy"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/olapy"
@echo "# devhelp"
.PHONY: epub
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: epub3
epub3:
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
@echo
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
.PHONY: latex
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
.PHONY: latexpdf
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: latexpdfja
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: text
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
.PHONY: man
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
.PHONY: texinfo
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
.PHONY: info
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
.PHONY: gettext
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
.PHONY: changes
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
.PHONY: linkcheck
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
.PHONY: doctest
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
.PHONY: coverage
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
.PHONY: xml
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
.PHONY: pseudoxml
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
.PHONY: dummy
dummy:
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
@echo
@echo "Build finished. Dummy builder generates no files."
.. _api:
API Documentation
=================
Package ``olapy.core.mdx.parser``
---------------------------------
To import the package, use::
import olapy.core.mdx.parser.parse
.. automodule:: olapy.core.mdx.parser.parse
.. autoclass:: MdxParser
:members:
parsing_mdx_query
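A minimal usage sketch (the constructor arguments and the exact ``parsing_mdx_query`` signature are assumptions; only the module, class and method names above come from the package)::

    from olapy.core.mdx.parser.parse import MdxParser

    # Hypothetical call: parse an MDX query taken from the micro benchmarks;
    # the argument layout is an assumption, not the documented signature.
    parser = MdxParser()
    mdx = "SELECT FROM [sales] WHERE ([Measures].[Amount])"
    parsed = parser.parsing_mdx_query(mdx)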
Package ``olapy.core.mdx.executor``
-----------------------------------
To import the package, use::
import olapy.core.mdx.executor.execute
.. automodule:: olapy.core.mdx.executor.execute
.. autoclass:: MdxEngine
:members:
get_cubes_names,
get_cube,
get_all_tables_names,
get_measures,
load_tables,
change_measures,
get_tables_and_columns,
execute_one_tuple,
add_missed_column,
execute_mdx
**MdxEngine.load_star_schema_dataframe** contains a DataFrame with all the dimensions merged into one (the star schema in a single table).
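A minimal sketch of driving the engine directly (the constructor arguments and the exact ``execute_mdx`` signature are assumptions; the class and method names come from the member list above, and the MDX query from the micro benchmarks)::

    from olapy.core.mdx.executor.execute import MdxEngine

    # Hypothetical usage: load the demo sales cube and run one query.
    engine = MdxEngine('sales')       # cube name argument: an assumption
    print(engine.get_measures())      # e.g. ['Amount', 'Count']
    mdx = "SELECT FROM [sales] WHERE ([Measures].[Amount])"
    result = engine.execute_mdx(mdx)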
Package ``olapy.core.services.xmla``
------------------------------------
To import the package, use::
import olapy.core.services.xmla
.. automodule:: olapy.core.services.xmla
.. autoclass:: XmlaProviderService
:members:
change_catalogue,
Discover,
Execute
.. autofunction:: start_server
.. automodule:: olapy.core.services.xmla_discover_tools
.. autoclass:: XmlaDiscoverTools
:members:
change_catalogue
.. automodule:: olapy.core.services.xmla_execute_tools
.. autoclass:: XmlaExecuteTools
:members:
split_DataFrame,
get_tuple_without_nan,
check_measures_only_selected,
generate_xs0_measures_only,
generate_xs0,
generate_cell_data,
generate_axes_info,
generate_slicer_axis
.. automodule:: olapy.core.services.logger
.. autoclass:: Logs
:members:
write_log
# -*- coding: utf-8 -*-
#
# olapy documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 23 16:47:07 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'olapy'
copyright = u'2017, Mouadh Kaabachi'
author = u'Mouadh Kaabachi'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'haiku'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'olapy v0.0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'olapydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'olapy.tex', u'olapy Documentation',
u'Mouadh Kaabachi', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'olapy', u'olapy Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'olapy', u'olapy Documentation',
author, 'olapy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
.. _cubes:
Cubes Creation
==============
Adding new cubes is very simple: just put your CSV files in a new folder inside the cubes folder (the folder name becomes the name of your new cube), so the path to your cube becomes olapy/cubes/YOUR_CUBE/YOUR_CSV_FILES.
**IMPORTANT**
Here are the rules the tables (CSV files) must follow so that everything works correctly:
1) Make sure that your tables follow the `star schema <http://datawarehouse4u.info/Data-warehouse-schema-architecture-star-schema.html>`_
2) The fact table must be named 'Facts'
3) ID columns must end with _id (product_id, person_id, ...)
4) Columns must be ordered according to their hierarchy (example: Continent -> Country -> City)
5) Each column name must be unique across all the tables
6) Make sure the data entered in the tables does not contain any trailing white space
7) If you want to impose an order on the tables, prefix the file names with the desired order, for example 1Geography.csv, 2Product.csv
8) The time dimension must be named 'Time'
*Take a look at the default cubes' structure (sales and foodmart), sketched below.*
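For reference, the default layout shipped with the project looks roughly like this (the sales file names are taken from this repository; treat the tree as illustrative)::

    olapy/cubes/
        sales/
            Facts.csv       # fact table, named 'Facts'
            Geography.csv   # dimension (Continent -> Country -> City)
            Product.csv     # dimension (Company -> Article -> Licence)
            Time.csv        # time dimension, named 'Time'
        foodmart/
            ...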
Here are two examples of dimensions structure:
Examples:
^^^^^^^^^
Cube 1
++++++
Geography table
---------------
+------------+------------+-----------+
| Geo_id | Continent | Country |
+============+============+===========+
| 0001 | America | Canada |
+------------+------------+-----------+
| bla bla bla |
+------------+------------+-----------+
| 00526 | Europe | France |
+------------+------------+-----------+
Facts table
-----------
+------------+------------+-----------+-----------+
| Geo_id | Prod_id | Amount | Count |
+============+============+===========+===========+
| 0001 | 111111 | 5000 | 20 |
+------------+------------+-----------+-----------+
| bla bla bla bla |
+------------+------------+-----------+-----------+
| 0011 | 222222 | 1000 | 40 |
+------------+------------+-----------+-----------+
Product table
-------------
+------------+------------+-----------+
| Prod_id | Company | Name |
+============+============+===========+
| 111111 | Ferrero | Nutella |
+------------+------------+-----------+
| bla bla bla |
+------------+------------+-----------+
| 222222 | Nestle | KitKat |
+------------+------------+-----------+
-------------------------------------------
Cube 2
++++++
*here we don't use id in tables*
Geography table
---------------
+-----------+------------+
| Continent | Country |
+============+===========+
| America | Canada |
+------------+-----------+
| bla bla bla |
+------------+-----------+
| Europe | France |
+------------+-----------+
Facts table
-----------
+------------+------------+-----------+-----------+
| Continent | Company | Amount | Count |
+============+============+===========+===========+
| America | Ferrero | 5000 | 20 |
+------------+------------+-----------+-----------+
| bla bla bla bla |
+------------+------------+-----------+-----------+
| Europe | Nestle | 1000 | 40 |
+------------+------------+-----------+-----------+
Product table
-------------
+------------+-----------+
| Company | Name |
+============+===========+
| Ferrero | Nutella |
+------------+-----------+
| bla bla bla |
+------------+-----------+
| Nestle | KitKat |
+------------+-----------+
.. olapy documentation master file, created by
sphinx-quickstart on Mon Jan 23 16:47:07 2017.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to olapy's documentation!
=================================
**olapy** is an OLAP engine with MDX support, a web interface (slice & dice) and XMLA support for Excel clients.
It can be found at
`olapy <https://github.com/abilian/olapy-core>`_.
Simple use
----------
For the impatient: here's a quick overview of how to use this project. Normally,
all you have to do is install the application first, like this::
python setup.py install
Once that is done, from the root directory *(which is olapy-core)* you can:
**Start the XMLA server** by using::
python olapy
Add ``-c`` or ``--console`` if you want to print logs only to the console, then use the URL http://127.0.0.1:8000/xmla in Excel to see your :ref:`cubes`.
- Be sure to use `Provider=MSOLAP.6 <https://blogs.technet.microsoft.com/excel_services__powerpivot_for_sharepoint_support_blog/2015/10/30/configuring-the-msolap-data-provider-version/>`_ in Excel
- If you want to select several measures in Excel, select them before the dimensions, then drag and drop the 'Values' attribute from the COLUMNS field to the ROWS field
**Start the web demo** by using::
1) python manage.py initdb : to initialize the db *(one time only)*
2) python manage.py runserver : to run the application
**login** : admin
**password** : admin
* To drop the database, use::
python manage.py dropdb
Tests
^^^^^
To run the tests (make sure that the server is started)::
python -m pytest tests
For more information about how to use this library, see the :ref:`api`.
Features
--------
- Executing `MDX <https://msdn.microsoft.com/en-us/library/ms145514.aspx>`_ queries
- An Excel demo
- A web demo
Contents:
.. toctree::
:maxdepth: 2
cubes
api
web
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. epub3 to make an epub3
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
echo. coverage to run coverage check of the documentation if enabled
echo. dummy to check syntax errors of document sources
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok
:sphinx_python
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
:sphinx_ok
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\olapy.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\olapy.ghc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "epub3" (
%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "coverage" (
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
if errorlevel 1 exit /b 1
echo.
echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
if "%1" == "dummy" (
%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
if errorlevel 1 exit /b 1
echo.
echo.Build finished. Dummy builder generates no files.
goto end
)
:end
.. _web:
WEB DEMO
========
Package ``olapy.web``
---------------------
To import the package, use::
import olapy.web
.. automodule:: olapy.web.pivottable
:members:
.. automodule:: olapy.web.stats_utils
:members:
.. automodule:: olapy.web.views
:members:
from __future__ import absolute_import, division, print_function
from flask_script import Manager, prompt_bool
from olapy.web import app, db
from olapy.web.models import User
manager = Manager(app)
@manager.command
def initdb():
db.create_all()
db.session.add(
User(
username="admin", email="admin@admin.com", password='admin'))
db.session.add(
User(
username="demo", email="demo@demo.com", password="demo"))
db.session.commit()
print('Initialized the database')
@manager.command
def dropdb():
if prompt_bool('Are you sure you want to lose all your data? '):
db.drop_all()
print('Dropped the database')
if __name__ == '__main__':
manager.run()
Benchmarks were run on the following CPU (all times are in seconds):
Intel(R) Core(TM) i7-2630QM CPU @ 2.00GHz
Query 1 :
SELECT
FROM [temp_cube]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 2 :
SELECT
NON EMPTY Hierarchize(AddCalculatedMembers(DrilldownMember({{{
[table0].[table0].[All table0A].Members}}}, {
[table0].[table0].[table0A].[301]})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [temp_cube]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 3 :
SELECT NON EMPTY Hierarchize(AddCalculatedMembers(DrilldownMember({{{
[table0].[table0].[All table0A].Members}}}, {
[table0].[table0].[table0A].[263],
[table0].[table0].[table0A].[301],
[table0].[table0].[table0A].[999],
[table0].[table0].[table0A].[59],
[table0].[table0].[table0A].[729],
[table0].[table0].[table0A].[72]
})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [temp_cube]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
+---------+----------------------+
| Query | olapy execution time |
+---------+----------------------+
| Query 1 | 0.219430952509 |
| Query 2 | 7.1477203975 |
| Query 3 | 7.4898611635 |
+---------+----------------------+
******************************************************************************
* mondrian with "warehouse" Cube (note the same as olapy but resemble to it) *
* (olapy warehouse"s cube has more rows) *
******************************************************************************
Query 1 :
SELECT
NON EMPTY {[Measures].[Supply Time]}
DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON 0
FROM
[Warehouse]
----------------------------------------------------------
Query 2 :
SELECT
NON EMPTY CrossJoin(Hierarchize({
[Product].[Brand Name].Members,
[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Wine].[Pearl].Children}), {
[Measures].[Supply Time]})
DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON 0
FROM [Warehouse]
----------------------------------------------------------
Query 3 :
SELECT
NON EMPTY CrossJoin(CrossJoin(Hierarchize({
[Product].[Brand Name].Members}),Hierarchize({
[Store Type].[All Store Types],
[Store Type].[All Store Types].Children})),
{[Measures].[Supply Time]})
DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON 0
FROM [Warehouse]
----------------------------------------------------------
+---------+---------------+----------------+
| Query | mondrian | olapy |
+---------+---------------+----------------+
| Query 1 | 20.1405278917 | 0.749627827854 |
| Query 2 | 15.2551661497 | 3.80780009186 |
| Query 3 | 14.7896879707 | 7.87186091205 |
+---------+---------------+----------------+
******************************************
* icCube v4.8.2 with the "sales Excel" cube *
******************************************
Query 1 :
SELECT
FROM [Sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 2 :
SELECT
NON EMPTY Hierarchize(AddCalculatedMembers({DrilldownLevel({
[Geography].[Geo].[All Continent]})}))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [Sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 3 :
SELECT
NON EMPTY Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{{
[Geography].[Geography].[All Continent].Members}}}, {
[Geography].[Geography].[Continent].[America],
[Geography].[Geography].[Continent].[Europe]})}}, {
[Geography].[Geography].[Continent].[America].[United States],
[Geography].[Geography].[Continent].[Europe].[France],
[Geography].[Geography].[Continent].[Europe].[Spain]})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 4 :
SELECT
NON EMPTY CrossJoin(Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{{
[Geography].[Geography].[All Continent].Members}}}, {
[Geography].[Geography].[Continent].[America],
[Geography].[Geography].[Continent].[Europe]})}}, {
[Geography].[Geography].[Continent].[America].[United States],
[Geography].[Geography].[Continent].[Europe].[France],
[Geography].[Geography].[Continent].[Europe].[Spain]}))), Hierarchize(AddCalculatedMembers({
[Product].[Product].[Company].Members})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 5 :
SELECT
NON EMPTY CrossJoin(CrossJoin(Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{{
[Geography].[Geography].[All Continent].Members}}}, {
[Geography].[Geography].[Continent].[America],
[Geography].[Geography].[Continent].[Europe]})}}, {
[Geography].[Geography].[Continent].[America].[United States],
[Geography].[Geography].[Continent].[Europe].[France],
[Geography].[Geography].[Continent].[Europe].[Spain]}))),
Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{
[Product].[Product].[Company].Members}}, {
[Product].[Product].[Company].[Crazy Development]})}}, {
[Product].[Product].[Company].[Crazy Development].[olapy]})))),
Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{DrilldownMember({{
[Time].[Time].[Year].Members}}, {
[Time].[Time].[Year].[2010]})}}, {
[Time].[Time].[Year].[2010].[Q2 2010]})}}, {
[Time].[Time].[Year].[2010].[Q2 2010].[May 2010]}))))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
+---------+----------------+----------------+
| Query | olapy | icCube |
+---------+----------------+----------------+
| Query 1 | 0.431155722163 | 0.905942181819 |
| Query 2 | 0.266231503647 | 0.157875911828 |
| Query 3 | 0.443898095905 | 0.138854977305 |
| Query 4 | 0.520997898594 | 0.162916208026 |
| Query 5 | 11.0833824688 | 2.09951531666 |
+---------+----------------+----------------+
---------------- Profiling olapy Query 5 ------------------
Tue Feb 07 11:37:44 2017 C:/Users/Mouadh/Google Drive/olapy-core/olapy-core/micro_bench/__init__.py.profile
3258213 function calls (3217524 primitive calls) in 9.936 seconds
Ordered by: internal time
List reduced from 785 to 15 due to restriction <15>
ncalls tottime percall cumtime percall filename:lineno(function)
799052 0.784 0.000 0.950 0.000 {isinstance}
1 0.410 0.410 9.928 9.928 xmla.py:3123(Execute)
87388 0.344 0.000 0.698 0.000 dtypes.py:74(is_dtype)
59665 0.326 0.000 0.616 0.000 common.py:357(_get_dtype_type)
5716 0.299 0.000 0.551 0.000 algorithms.py:166(safe_sort)
5706 0.227 0.000 1.673 0.000 algorithms.py:264(factorize)
5786/5766 0.198 0.000 1.010 0.000 base.py:142(__new__)
15954 0.183 0.000 0.183 0.000 {method 'reduce' of 'numpy.ufunc' objects}
1440 0.164 0.000 0.251 0.000 groupby.py:4145(loop)
7176 0.162 0.000 0.951 0.000 algorithms.py:1010(take_nd)
137023 0.158 0.000 0.203 0.000 {hasattr}
725 0.153 0.000 0.179 0.000 groupby.py:4199(decons_group_index)
146959/124486 0.124 0.000 0.197 0.000 {len}
75137 0.112 0.000 0.113 0.000 {getattr}
728 0.110 0.000 1.046 0.001 groupby.py:2340(_get_grouper)
Benchmarks were run on the following CPU (all times are in seconds):
Intel(R) Core(TM) i7-2630QM CPU @ 2.00GHz
Query 1 :
SELECT
FROM [temp_cube]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 2 :
SELECT
NON EMPTY Hierarchize(AddCalculatedMembers(DrilldownMember({{{
[table0].[table0].[All table0A].Members}}}, {
[table0].[table0].[table0A].[173]})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [temp_cube]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 3 :
SELECT NON EMPTY Hierarchize(AddCalculatedMembers(DrilldownMember({{{
[table0].[table0].[All table0A].Members}}}, {
[table0].[table0].[table0A].[497],
[table0].[table0].[table0A].[173],
[table0].[table0].[table0A].[211],
[table0].[table0].[table0A].[95],
[table0].[table0].[table0A].[811],
[table0].[table0].[table0A].[538]
})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [temp_cube]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
+---------+----------------------+
| Query | olapy execution time |
+---------+----------------------+
| Query 1 | 0.0900395646351 |
| Query 2 | 1.41003381724 |
| Query 3 | 1.52728830868 |
+---------+----------------------+
******************************************************************************
* mondrian with "warehouse" Cube (note the same as olapy but resemble to it) *
* (olapy warehouse"s cube has more rows) *
******************************************************************************
Query 1 :
SELECT
NON EMPTY {[Measures].[Supply Time]}
DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON 0
FROM
[Warehouse]
----------------------------------------------------------
Query 2 :
SELECT
NON EMPTY CrossJoin(Hierarchize({
[Product].[Brand Name].Members,
[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Wine].[Pearl].Children}), {
[Measures].[Supply Time]})
DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON 0
FROM [Warehouse]
----------------------------------------------------------
Query 3 :
SELECT
NON EMPTY CrossJoin(CrossJoin(Hierarchize({
[Product].[Brand Name].Members}),Hierarchize({
[Store Type].[All Store Types],
[Store Type].[All Store Types].Children})),
{[Measures].[Supply Time]})
DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON 0
FROM [Warehouse]
----------------------------------------------------------
+---------+---------------+----------------+
| Query | mondrian | olapy |
+---------+---------------+----------------+
| Query 1 | 18.2991748387 | 0.295552442385 |
| Query 2 | 5.94784549779 | 0.64196827645 |
| Query 3 | 9.70531274535 | 1.7915328602 |
+---------+---------------+----------------+
******************************************
* icCube v4.8.2 with "sales Excel" Cube *
******************************************
Query 1 :
SELECT
FROM [Sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 2 :
SELECT
NON EMPTY Hierarchize(AddCalculatedMembers({DrilldownLevel({
[Geography].[Geo].[All Continent]})}))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [Sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 3 :
SELECT
NON EMPTY Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{{
[Geography].[Geography].[All Continent].Members}}}, {
[Geography].[Geography].[Continent].[America],
[Geography].[Geography].[Continent].[Europe]})}}, {
[Geography].[Geography].[Continent].[America].[United States],
[Geography].[Geography].[Continent].[Europe].[France],
[Geography].[Geography].[Continent].[Europe].[Spain]})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 4 :
SELECT
NON EMPTY CrossJoin(Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{{
[Geography].[Geography].[All Continent].Members}}}, {
[Geography].[Geography].[Continent].[America],
[Geography].[Geography].[Continent].[Europe]})}}, {
[Geography].[Geography].[Continent].[America].[United States],
[Geography].[Geography].[Continent].[Europe].[France],
[Geography].[Geography].[Continent].[Europe].[Spain]}))), Hierarchize(AddCalculatedMembers({
[Product].[Product].[Company].Members})))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
Query 5 :
SELECT
NON EMPTY CrossJoin(CrossJoin(Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{{
[Geography].[Geography].[All Continent].Members}}}, {
[Geography].[Geography].[Continent].[America],
[Geography].[Geography].[Continent].[Europe]})}}, {
[Geography].[Geography].[Continent].[America].[United States],
[Geography].[Geography].[Continent].[Europe].[France],
[Geography].[Geography].[Continent].[Europe].[Spain]}))),
Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{
[Product].[Product].[Company].Members}}, {
[Product].[Product].[Company].[Crazy Development]})}}, {
[Product].[Product].[Company].[Crazy Development].[olapy]})))),
Hierarchize(AddCalculatedMembers(DrilldownMember({{DrilldownMember({{DrilldownMember({{
[Time].[Time].[Year].Members}}, {
[Time].[Time].[Year].[2010]})}}, {
[Time].[Time].[Year].[2010].[Q2 2010]})}}, {
[Time].[Time].[Year].[2010].[Q2 2010].[May 2010]}))))
DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME
ON COLUMNS
FROM [sales]
WHERE ([Measures].[Amount])
CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS
----------------------------------------------------------
+---------+----------------+-----------------+
| Query | olapy | icCube |
+---------+----------------+-----------------+
| Query 1 | 0.281230660283 | 0.621506021932 |
| Query 2 | 0.059574795634 | 0.0932817094385 |
| Query 3 | 0.1762889296 | 0.0877657527287 |
| Query 4 | 0.146335781106 | 0.101121254574 |
| Query 5 | 1.094864808 | 1.28551811198 |
+---------+----------------+-----------------+
---------------- Profiling olapy Query 5 ------------------
Fri Mar 03 15:53:38 2017 C:/Users/Mouadh/Google Drive/olapy-core/olapy-core/micro_bench/__init__.py.profile
101703 function calls (100437 primitive calls) in 0.337 seconds
Ordered by: internal time
List reduced from 658 to 15 due to restriction <15>
ncalls tottime percall cumtime percall filename:lineno(function)
3 0.129 0.043 0.129 0.043 {gc.collect}
19870 0.013 0.000 0.019 0.000 {isinstance}
1 0.010 0.010 0.068 0.068 xmla_execute_tools.py:53(generate_xs0)
2749 0.007 0.000 0.014 0.000 dtypes.py:74(is_dtype)
1 0.006 0.006 0.337 0.337 xmla.py:104(Execute)
9403/8227 0.005 0.000 0.007 0.000 {len}
1580 0.004 0.000 0.008 0.000 common.py:357(_get_dtype_type)
1684 0.004 0.000 0.004 0.000 {method 'format' of 'str' objects}
170/122 0.004 0.000 0.017 0.000 base.py:142(__new__)
225 0.004 0.000 0.023 0.000 algorithms.py:1010(take_nd)
88 0.004 0.000 0.008 0.000 internals.py:2841(_rebuild_blknos_and_blklocs)
3671 0.003 0.000 0.003 0.000 {hasattr}
2985/2975 0.003 0.000 0.003 0.000 {getattr}
485 0.003 0.000 0.003 0.000 {method 'reduce' of 'numpy.ufunc' objects}
1411/1390 0.003 0.000 0.003 0.000 {numpy.core.multiarray.array}
import string
import os
import pandas as pd
import numpy as np
import shutil
from olapy.core.mdx.executor.execute import MdxEngine
CUBE_NAME = "temp_cube"
class CubeGen:
"""
Benchmark olapy query execution
:param number_dimensions: number of dimensions to generate (not including fact)
:param rows_length: number of line in each dimension
:param columns_length: cumber of columns in each dimension
"""
# We have to generate DataFrames and save them to csv format because XmlaProviderService and
# MdxEngine classes use those files
def __init__(self, number_dimensions=1, rows_length=1000, columns_length=2):
self.number_dimensions = number_dimensions
self.rows_length = rows_length
self.columns_length = columns_length
def generate_cube(self, min_val=5, max_val=100):
"""
Generate dimension and fact that follows star schema
:param min_val: minimal value in every dimension
:param max_val: maximal value in every dimension
:return: dict of DataFrames
"""
tables = {}
facts = pd.DataFrame()
for idx, dim in enumerate(range(self.number_dimensions)):
table_name = 'table' + str(idx)
table_values = pd.DataFrame(np.random.randint(min_val, max_val, size=(self.rows_length, self.columns_length)),
columns=list(
table_name + col for col in string.ascii_uppercase[:self.columns_length]))
table_values.index.name = table_name + "_id"
tables[table_name] = table_values.reset_index()
facts[table_name + "_id"] = tables[table_name][table_name + "_id"]
facts['Amount'] = np.random.randint(300, 1000, size=(self.rows_length, 1))
tables['Facts'] = facts
return tables
def generate_csv(self, tables):
"""
generate csv files for the generated DataFrames
:param tables: dict of DataFrames
"""
cube_path = os.path.join(
os.path.abspath(
os.path.join(os.path.dirname(__file__), "..")), MdxEngine.CUBE_FOLDER)
if not os.path.isdir(os.path.join(cube_path, CUBE_NAME)):
os.makedirs(os.path.join(cube_path, CUBE_NAME))
cube_path = os.path.join(cube_path, CUBE_NAME)
for (table_name, table_value) in tables.items():
table_value.to_csv(os.path.join(os.path.join(cube_path, table_name + '.csv')), sep=";", index=False)
def remove_temp_cube(self):
"""
remove the temporary cube
"""
cube_path = os.path.join(
os.path.abspath(
os.path.join(os.path.dirname(__file__), "..")), MdxEngine.CUBE_FOLDER)
if os.path.isdir(os.path.join(cube_path, CUBE_NAME)):
shutil.rmtree(os.path.join(cube_path, CUBE_NAME))
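# Usage sketch (illustration, not part of the original file): driving CubeGen to
# build the temporary "temp_cube" that the benchmark queries above run against.
# The csv files are written under MdxEngine.CUBE_FOLDER so the engine can find them.
if __name__ == '__main__':
    gen = CubeGen(number_dimensions=1, rows_length=1000, columns_length=2)
    tables = gen.generate_cube(min_val=5, max_val=100)  # {'table0': DataFrame, 'Facts': DataFrame}
    gen.generate_csv(tables)  # writes table0.csv and Facts.csv into the temp_cube folder
    # ... run the benchmark queries against temp_cube here ...
    gen.remove_temp_cube()  # clean up the generated folder afterwards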
from timeit import Timer
from cube_generator import CUBE_NAME
class MicBench:
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def bench(self, connection, query, cube=CUBE_NAME, number=1):
"""
To be precise, this executes the query statement once, and
then returns the time it takes to execute
:param connection: connection object
:param query: MDX query
:param cube: cube name
:param number: number of times through the loop, defaulting
to one million
:return: float execution time in seconds
"""
return Timer(lambda: connection.Execute(query, Catalog=cube)).timeit(number=number)
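# Usage sketch (assumption, not part of the original file): timing one of the
# benchmark queries above through the Python `xmla` client library. The
# XMLAProvider()/connect() calls below follow the olap.xmla package and are an
# assumption; an olapy XMLA server must already be listening on the given URL.
from olap.xmla import xmla

provider = xmla.XMLAProvider()
connection = provider.connect(location='http://127.0.0.1:8000/xmla')
query = 'SELECT FROM [temp_cube] WHERE ([Measures].[Amount])'
print(MicBench().bench(connection, query, cube=CUBE_NAME, number=1))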
from __future__ import absolute_import, division, print_function
import sys
from core.services.xmla import start_server
def main(arg):
    '''
    Start the XMLA provider.

    :param arg: command-line arguments; -c | --console shows logs in the server console
    '''
if len(arg) > 1:
if arg[1] in ("-c", "--console"):
start_server(write_on_file=False)
else:
            print('invalid argument!')
else:
start_server(write_on_file=True)
if __name__ == "__main__":
main(sys.argv)
class ConditionError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
from __future__ import absolute_import, division, print_function
class selectStatement():
def __init__(self, select_statement):
self.select_statement = select_statement
def __str__(self):
return '{}'.format(self.select_statement)
(* This ebnf file is a translation and adaptation of Microsoft's MDX specification, from https://msdn.microsoft.com/fr-fr/library/windows/desktop/ms717923%28v=vs.85%29.aspx *)
(* The ebnf conversion was guided by http://stackoverflow.com/questions/14922242/how-to-convert-bnf-to-ebnf *)
(* Generating mdx_parser.py is as easy as: /path/to/grako bnf_mdx.ebnf -o mdx_parser.py *)
MDX_statement =
select_statement;
select_statement::selectStatement = name:'SELECT' [axis_specification_columns:axis_specification]
[',' axis_specification_rows:axis_specification]
'FROM' cube_specification:cube_specification
['WHERE' condition_specification:condition_specification]$
;
axis_specification = [left_accolade] [fetch_form] @:dim_props_place [right_accolade] 'ON' axis_name;
dim_props_place = [left_parentheses] @:dim_props_type [right_parentheses] ;
dim_props_type = [left_accolade] [fetch_type] @:dim_props_op_l1 [right_accolade] [{@:operator dim_props_type}*];
dim_props_op_l1 = [left_parentheses] @:dim_props_op [right_parentheses];
dim_props_op = [left_accolade] @:dim_props_ligne [right_accolade] [{@:comma dim_props_op}*];
dim_props_ligne = [left_parentheses] @:dimension_place [right_parentheses] [{ (@:comma | @:dpoint ) @:dim_props_ligne}*] ;
dimension_place = [left_accolade] @:dim_props [ point @:laste_node] [ @:comma @:dim_props [ point @:laste_node ]] [@:dpoint @:dim_props [ point @:laste_node ]] [right_accolade] | @:dimension_shortcut;
dim_props = {[point] [left_bracket] @:dimension [right_bracket]}* ;
laste_node = "members" | "children" | "Members" | 'ALLMEMBERS' ;
dimension = @:/[a-zA-Z0-9'_'' '',']*/ ;
axis_name = '0' | '1' |'COLUMNS' | 'ROWS' | '_ROWS';
cube_specification = [left_bracket] dimension [right_bracket];
condition_specification = [left_parentheses] {[point] [left_bracket] @:dimension [right_bracket]}* [right_parentheses] ;
digit =
"0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ;
fetch_type = 'CROSSJOIN' | 'NONEMPTY' | 'union' | 'except' | 'extract' ;
dimension_shortcut = 'all' | 'time' ;
fetch_form = 'NONEMPTY' | 'non_empty' | 'non empty' ;
left_bracket = '[';
right_bracket = ']';
left_parentheses = '(';
right_parentheses = ')';
left_accolade = '{';
right_accolade = '}';
point = '.' ;
dpoint = ':' ;
comma = ',' ;
operator = '+' | '-' | '/' | '*' ;
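A minimal sketch of exercising this grammar from Python (illustration only, not part of the repository). It assumes grako's runtime genmodel()/parse() API; the project instead pre-generates mdx_parser.py with the grako command shown in the header comment above.

import grako

# Compile the EBNF above at runtime (assumed grako API) and parse one of the
# benchmark queries, starting from the top-level MDX_statement rule.
grammar = open('bnf_mdx.ebnf').read()
model = grako.genmodel('MDX', grammar)
ast = model.parse('SELECT FROM [temp_cube] WHERE ([Measures].[Amount])', 'MDX_statement')
print(ast)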