Commit 8ea41cf9 authored by mouadh

format docstring

parent 718df38c
......@@ -11,8 +11,8 @@ CUBE_NAME ="temp_cube"
class CubeGen:
"""
Benchmark olapy query execution
Benchmark olapy query execution.
:param number_dimensions: number of dimensions to generate (not including fact)
:param rows_length: number of lines in each dimension
:param columns_length: number of columns in each dimension
......@@ -28,7 +28,7 @@ class CubeGen:
def generate_cube(self, min_val=5, max_val=100):
"""
Generate dimension and fact that follows star schema
Generate dimensions and a facts table following a star schema.
:param min_val: minimal value in every dimension
:param max_val: maximal value in every dimension
......@@ -52,10 +52,10 @@ class CubeGen:
@staticmethod
def generate_csv(tables):
"""
generate csv files for the generated DataFrames
Generate csv files for the generated DataFrames.
:param tables: dict of DataFrames
"""
cube_path = os.path.join(
os.path.abspath(
os.path.join(os.path.dirname(__file__), "..")), MdxEngine.CUBE_FOLDER)
......@@ -67,9 +67,7 @@ class CubeGen:
@staticmethod
def remove_temp_cube():
"""
remove the temporary cube
"""
"""Remove the temporary cube."""
cube_path = os.path.join(
os.path.abspath(
os.path.join(os.path.dirname(__file__), "..")), MdxEngine.CUBE_FOLDER)
......
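A hedged usage sketch of the generator described above; the constructor keyword names come from the class docstring, while the assumption that generate_cube() returns the dict of DataFrames consumed by generate_csv() is mine::

    gen = CubeGen(number_dimensions=3, rows_length=1000, columns_length=5)
    tables = gen.generate_cube(min_val=5, max_val=100)   # {table_name: DataFrame} (assumed)
    CubeGen.generate_csv(tables)     # write the csv files under olapy-data/cubes
    # ... run whatever needs the temporary cube ...
    CubeGen.remove_temp_cube()       # clean up the generated folder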
......@@ -4,17 +4,15 @@ from cube_generator import CUBE_NAME
class MicBench:
"""
Micro Benchmark for an mdx query
"""
"""Micro Benchmark for an mdx query."""
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
@staticmethod
def bench(connection, query, cube=CUBE_NAME, number=1):
"""
To be precise, this executes the query statement once, and
then returns the time it takes to execute
To be precise, this executes the query statement once, and then returns the time it takes to execute.
:param connection: connection object
:param query: MDX query
......
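A hedged sketch of timing one query with MicBench.bench(); the olap.xmla client and the server URL are assumptions, only the bench() signature and CUBE_NAME come from this diff::

    from olap.xmla import xmla              # assumed XMLA client library
    from cube_generator import CUBE_NAME

    provider = xmla.XMLAProvider()
    connection = provider.connect(location="http://localhost:8000/xmla")
    query = "SELECT FROM [" + CUBE_NAME + "]"          # illustrative MDX
    # executes the statement once and returns the elapsed time
    print(MicBench.bench(connection, query, cube=CUBE_NAME, number=1))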
......@@ -6,11 +6,12 @@ from core.services.xmla import start_server
def main(arg):
'''
Execute xmla provider
"""
Execute xmla provider.
:param arg: -c | --console : show logs in server console
:return:
'''
"""
if len(arg) > 1:
if arg[1] in ("-c", "--console"):
start_server(write_on_file=False)
......
......@@ -20,11 +20,14 @@ RUNNING_TOX = 'RUNNING_TOX' in os.environ
class MdxEngine:
"""
The principal class for executing a query
The principal class for executing a query.
:param cube_name: It must be under home_directory/olapy-data/CUBE_FOLDER (example : home_directory/olapy-data/cubes/sales)
:param cube_folder: parent cube folder name
:param mdx_query: query to execute
:param sep: separator in the csv files
"""
CUBE_FOLDER = "cubes"
# (before instantiate MdxEngine I need to access cubes information)
csv_files_cubes = []
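A hedged usage sketch of the engine documented above; the 'sales' cube and the MDX text are illustrative, and the result keys ('result', 'columns_desc') are the ones referenced later in this commit::

    engine = MdxEngine(cube_name="sales", sep=";")
    print(MdxEngine.get_cubes_names())      # cubes under ~/olapy-data/cubes (and postgres)
    engine.mdx_query = ("SELECT {[Measures].[Amount]} ON COLUMNS "
                        "FROM [sales]")
    result = engine.execute_mdx()
    print(result['result'])                 # filtered star-schema DataFrame
    print(result['columns_desc'])           # columns used, grouped by axis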
......@@ -39,13 +42,6 @@ class MdxEngine:
cube_folder=CUBE_FOLDER,
sep=';',
fact_table_name="Facts"):
'''
:param cube_folder: parent cube folder name
:param mdx_query: query to execute
:param sep: separator in the csv files
'''
self.cube_folder = cube_folder
self.cube = cube_name
self.sep = sep
......@@ -70,10 +66,7 @@ class MdxEngine:
@classmethod
def get_cubes_names(cls):
'''
:return: list cubes name that exists in cubes folder (under ~/olapy-data/cubes) and postgres database (if connected)
'''
""":return: list cubes name that exists in cubes folder (under ~/olapy-data/cubes) and postgres database (if connected)."""
# get csv files folders (cubes)
# toxworkdir does not expanduser properly under tox
if RUNNING_TOX:
......@@ -121,7 +114,7 @@ class MdxEngine:
def _get_tables_name(self):
"""
get all tables names
Get all table names.
:return: list of table names
"""
......@@ -129,11 +122,11 @@ class MdxEngine:
def _load_table_config_file(self, cube_obj):
"""
load tables from config file
Load tables from config file.
:param cube_obj: cubes object
:return: tables dict with table name as key and DataFrame as value
"""
tables = {}
# just one facts table right now
self.facts = cube_obj.facts[0].table_name
......@@ -159,10 +152,10 @@ class MdxEngine:
def _load_tables_csv_files(self):
"""
load tables from csv files
Load tables from csv files.
:return: tables dict with table name as key and dataframe as value
"""
tables = {}
cube = self.get_cube()
for file in os.listdir(cube):
......@@ -177,10 +170,10 @@ class MdxEngine:
def _load_tables_db(self):
"""
load tables from database
Load tables from database.
:return: tables dict with table name as key and dataframe as value
"""
tables = {}
db = MyDB(db=self.cube)
cursor = db.connection.cursor()
......@@ -198,11 +191,10 @@ class MdxEngine:
def load_tables(self):
"""
load all tables { Table name : DataFrame } of the current cube instance
Load all tables { Table name : DataFrame } of the current cube instance.
:return: dict with key as table name and DataFrame as value
"""
config_file_parser = ConfigParser(self.cube_path)
tables = {}
if config_file_parser.config_file_exist(
......@@ -222,10 +214,7 @@ class MdxEngine:
return tables
def get_measures(self):
"""
:return: all numerical columns in facts table
"""
""":return: all numerical columns in facts table."""
# col.lower()[-2:] != 'id' to ignore any id column
return [
col
......@@ -235,12 +224,12 @@ class MdxEngine:
def _construct_star_schema_config_file(self, cube_name, cubes_obj):
"""
Construct star schema DataFrame from configuration file
Construct star schema DataFrame from configuration file.
:param cube_name: cube name (or database name)
:param cubes_obj: cubes object
:return: star schema DataFrame
"""
self.facts = cubes_obj.facts[0].table_name
db = MyDB(db=cube_name)
# load facts table
......@@ -271,11 +260,11 @@ class MdxEngine:
def _construct_star_schema_csv_files(self, cube_name):
"""
Construct star schema DataFrame from csv files
Construct star schema DataFrame from csv files.
:param cube_name: cube name (folder name)
:return: star schema DataFrame
"""
cube = self.get_cube()
# loading facts table
fusion = pd.read_csv(
......@@ -292,11 +281,11 @@ class MdxEngine:
def _construct_star_schema_db(self, cube_name):
"""
Construct star schema DataFrame from database
Construct star schema DataFrame from database.
:param cube_name: cube name (database name)
:return: star schema DataFrame
"""
db = MyDB(db=cube_name)
# load facts table
......@@ -319,12 +308,11 @@ class MdxEngine:
def get_star_schema_dataframe(self, cube_name):
"""
merge all DataFrames as star schema
Merge all DataFrames as star schema.
:param cube_name: cube name with which we want to generate a star schema model
:return: star schema DataFrame
"""
fusion = None
config_file_parser = ConfigParser(self.cube_path)
......@@ -348,7 +336,7 @@ class MdxEngine:
def get_all_tables_names(self, ignore_fact=False):
"""
get list of tables names of the cube
Get the list of table names of the cube.
:param ignore_fact: if True, exclude the facts table name from the result
:return: all table names
......@@ -359,7 +347,7 @@ class MdxEngine:
def get_cube(self):
"""
get path to the cube (example /home_directory/olapy-data/cubes)
Get path to the cube (example /home_directory/olapy-data/cubes).
:return: path to the cube
"""
......@@ -369,7 +357,7 @@ class MdxEngine:
@staticmethod
def get_tuples(query, start=None, stop=None):
"""
get all tuples in the mdx query
Get all tuples in the mdx query.
example::
......@@ -399,7 +387,6 @@ class MdxEngine:
:param stop: keyword in the query at which we stop (example: stop = ON ROWS)
:return: nested list of tuples (see the example)
"""
# french characters
# or use new regex 2017.02.08
regex = "(\[[\w+\d ]+\](\.\[[\w+\d\.\,\s\_\-\é\ù\è\ù\û\ü\ÿ\\\à\â\æ\ç\é\è\ê\ë\ï\î" \
......@@ -422,12 +409,11 @@ class MdxEngine:
# TODO temporary function
def decorticate_query(self, query):
"""
get all tuples that exists in the MDX Query by axes
Get all tuples that exist in the MDX query, grouped by axis.
:param query: MDX Query
:return: dict of axis as key and tuples as value
"""
tuples_on_mdx_query = self.get_tuples(query)
on_rows = []
on_columns = []
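As an aside, a hedged illustration of what get_tuples() and decorticate_query() are documented to return, given an MdxEngine instance called engine; the query text and the exact nesting are illustrative::

    query = ("SELECT {[Geography].[Geography].[Continent]} ON COLUMNS, "
             "{[Product].[Product].[Company]} ON ROWS "
             "FROM [sales] WHERE ([Measures].[Amount])")
    MdxEngine.get_tuples(query)
    # -> nested list, one inner list per [..].[..] tuple, e.g.
    #    [['Geography', 'Geography', 'Continent'],
    #     ['Product', 'Product', 'Company'],
    #     ['Measures', 'Amount']]
    engine.decorticate_query(query)
    # -> dict of axis -> tuples (keys such as 'all', 'columns', 'rows')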
......@@ -468,7 +454,7 @@ class MdxEngine:
@staticmethod
def change_measures(tuples_on_mdx):
"""
set measures to which exists in the query
Extract the measures that exist in the query.
:param tuples_on_mdx: list of tuples:
......@@ -478,7 +464,6 @@ class MdxEngine:
:return: measures column's names
"""
return [
tple[-1] for tple in tuples_on_mdx if tple[0].upper() == "MEASURES"
]
......@@ -486,8 +471,7 @@ class MdxEngine:
def get_tables_and_columns(self, tuple_as_list):
# TODO update docstring
"""
get used dimensions and columns in the MDX Query (useful for DataFrame -> xmla response transformation)
Get used dimensions and columns in the MDX Query (useful for DataFrame -> xmla response transformation).
:param tuple_as_list: list of tuples
......@@ -501,8 +485,7 @@ class MdxEngine:
Product : ['Company']
Facts : ['Amount','Count']
}
"""
"""
axes = {}
# TODO optimize
for axis, tuples in tuple_as_list.items():
......@@ -530,9 +513,7 @@ class MdxEngine:
def execute_one_tuple(self, tuple_as_list, Dataframe_in, columns_to_keep):
"""
filter a DataFrame (Dataframe_in) with one tuple
Filter a DataFrame (Dataframe_in) with one tuple.
Example ::
......@@ -591,7 +572,7 @@ class MdxEngine:
@staticmethod
def add_missed_column(dataframe1, dataframe2):
"""
solution to fix BUG : https://github.com/pandas-dev/pandas/issues/15525
Solution to fix BUG : https://github.com/pandas-dev/pandas/issues/15525
if you want to concat two dataframes with different columns like :
......@@ -671,7 +652,7 @@ class MdxEngine:
def update_columns_to_keep(self, tuple_as_list, columns_to_keep):
"""
if we have multiple dimensions, with many columns like:
If we have multiple dimensions, with many columns like:
columns_to_keep :
......@@ -717,7 +698,6 @@ class MdxEngine:
:return: updated columns_to_keep
"""
if len(
tuple_as_list
) == 3 and tuple_as_list[-1] in self.tables_loaded[tuple_as_list[0]].columns:
......@@ -731,7 +711,7 @@ class MdxEngine:
def execute_mdx(self):
"""
execute an MDX Query
Execute an MDX Query.
usage ::
......@@ -747,7 +727,6 @@ class MdxEngine:
}
"""
# use measures that exists on where or insides axes
query_axes = self.decorticate_query(self.mdx_query)
if self.change_measures(query_axes['all']):
......
......@@ -7,15 +7,12 @@ from .gen_parser.models import SelectStatement
class MdxParser:
"""
parse the mdx query and split it into well-defined parts
"""
"""Parse the mdx query and split it into well-defined parts."""
START = 'MDX_statement'
@staticmethod
def parsing_mdx_query(axis, query):
'''
Split the query into axis
"""Split the query into axis.
**Example**::
......@@ -47,7 +44,7 @@ class MdxParser:
:param query: MDX Query
:param axis: column | row | cube | condition | all
:return: Tuples in the axis, from the MDX query
'''
"""
model = MdxParserGen(semantics=ModelBuilderSemantics(
types=[SelectStatement]))
ast = model.parse(query, rule_name=MdxParser.START, ignorecase=True)
......
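A hedged sketch of splitting a query by axis with the static method above; the query text is illustrative and the exact return types are not shown in this diff::

    query = ("SELECT {[Measures].[Amount]} ON COLUMNS, "
             "{[Geography].[Geography].[Continent]} ON ROWS "
             "FROM [sales]")
    columns = MdxParser.parsing_mdx_query('column', query)   # tuples on COLUMNS
    rows = MdxParser.parsing_mdx_query('row', query)         # tuples on ROWS
    cube = MdxParser.parsing_mdx_query('cube', query)        # cube referenced in FROM (assumed)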
......@@ -9,7 +9,7 @@ from .models import Cube, Dimension, Facts
class ConfigParser:
"""
Parse olapy config files
Parse olapy config files.
Config file used if you want to show only some measures, dimensions, columns... in excel
......@@ -114,7 +114,6 @@ class ConfigParser:
</cubes>
"""
def __init__(self, cube_path, file_name='cubes-config.xml'):
"""
......@@ -126,7 +125,7 @@ class ConfigParser:
def config_file_exist(self):
"""
check whether the config file exists or not
Check whether the config file exists or not.
:return: True | False
"""
......@@ -134,11 +133,10 @@ class ConfigParser:
def xmla_authentication(self):
"""
check if excel need authentication to access cubes or not. (xmla_authentication tag in the config file)
Check whether Excel needs authentication to access the cubes (xmla_authentication tag in the config file).
:return: True | False
"""
with open(os.path.join(self.cube_path, self.file_name)) as config_file:
parser = etree.XMLParser()
......@@ -152,11 +150,10 @@ class ConfigParser:
def get_cubes_names(self):
"""
get all cubes names in the config file
Get all cube names in the config file.
:return: dict of cube name as key and cube source as value (csv or postgres) (right now only postgres is supported)
"""
with open(os.path.join(self.cube_path, self.file_name)) as config_file:
parser = etree.XMLParser()
......@@ -172,11 +169,10 @@ class ConfigParser:
def construct_cubes(self):
"""
construct cube (with it dimensions) and facts from the config file
Construct the cube (with its dimensions) and facts from the config file.
:return: list of Cubes instance
"""
if self.config_file_exist():
try:
with open(os.path.join(self.cube_path,
......
......@@ -2,10 +2,7 @@ import psycopg2 as pg
class MyDB(object):
"""
Connect to sql database (postgres only right now)
"""
"""Connect to sql database (postgres only right now)."""
def __init__(self,
username='postgres',
password='root',
......
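A hedged sketch of the connection helper as MdxEngine uses it elsewhere in this commit; the credentials, database name and SQL text are illustrative::

    db = MyDB(db='sales', username='postgres', password='root')
    cursor = db.connection.cursor()
    cursor.execute("SELECT table_name FROM information_schema.tables "
                   "WHERE table_schema = 'public'")
    print(cursor.fetchall())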
class Facts:
"""
Facts class used to encapsulate config file attributes
"""
"""Facts class used to encapsulate config file attributes."""
def __init__(self, **kwargs):
"""
......@@ -23,31 +21,24 @@ class Facts:
class Dimension:
"""
Dimension class used to encapsulate config file attributes
"""
"""Dimension class used to encapsulate config file attributes."""
def __init__(self, **kwargs):
"""
:param kwargs: {
name : 'something',
displayName : 'something',
columns :
{ name : 'something' }
}
}
"""
self.__dict__.update(kwargs)
def __str__(self):
return str(self.__dict__)
class Cube:
"""
Cube class used to encapsulate config file attributes
"""
"""Cube class used to encapsulate config file attributes."""
def __init__(self, **kwargs):
"""
......
......@@ -9,9 +9,7 @@ from spyne import ComplexModel, Integer, Unicode, XmlAttribute
# thus xmla requests from excel can be reached
class Tuple(object):
"""
Tuple description (used by spyne)
"""
"""Tuple description (used by spyne)."""
def __init__(self, Hierarchy, UName, Caption, LName, LNum, DisplayInfo,
PARENT_UNIQUE_NAME, HIERARCHY_UNIQUE_NAME, Value):
......@@ -42,9 +40,8 @@ class Tuple(object):
class Property(ComplexModel):
"""
Property description (used by spyne)
"""
"""Property description (used by spyne)."""
__namespace__ = "urn:schemas-microsoft-com:xml-analysis"
_type_info = {
'LocaleIdentifier': Unicode,
......@@ -67,9 +64,8 @@ class Property(ComplexModel):
class Restriction(ComplexModel):
"""
Restriction description (used by spyne)
"""
"""Restriction description (used by spyne)."""
__namespace__ = "urn:schemas-microsoft-com:xml-analysis"
_type_info = {
'CATALOG_NAME': Unicode,
......@@ -89,50 +85,44 @@ class Restriction(ComplexModel):
class Session(ComplexModel):
"""
Session description (used by spyne)
"""
"""Session description (used by spyne)."""
__namespace__ = "urn:schemas-microsoft-com:xml-analysis"
SessionId = XmlAttribute(Unicode)
class Restrictionlist(ComplexModel):
"""
Restriction description (used by spyne)
"""
"""Restriction description (used by spyne)."""
__namespace__ = "urn:schemas-microsoft-com:xml-analysis"
__type_name__ = "Restrictions"
RestrictionList = Restriction
class Propertielist(ComplexModel):
"""
Properties description (used by spyne)
"""
"""Properties description (used by spyne)."""
__namespace__ = "urn:schemas-microsoft-com:xml-analysis"
__type_name__ = "Properties"
PropertyList = Property
class Command(ComplexModel):
"""
Command description (used by spyne)
"""
"""Command description (used by spyne)."""
_type_info = {'Statement': Unicode,}
class ExecuteRequest(ComplexModel):
"""
Execute description (used by spyne)
"""
"""Execute description (used by spyne)."""
Command = Command
Properties = Propertielist
class DiscoverRequest(ComplexModel):
"""
Discover description (used by spyne)
"""
"""Discover description (used by spyne)."""
RequestType = Unicode
Restrictions = Restrictionlist
Properties = Propertielist
......@@ -22,24 +22,21 @@ from .xmla_execute_xsds import execute_xsd
class XmlaProviderService(ServiceBase):
"""
The main class to activate SOAP services between XMLA clients and olapy.
IMPORTANT : all xsd and soap responses are written manually (not generated by Spyne lib)
because Spyne doesn't support encodingStyle and other namespaces required by Excel,
check it <http://stackoverflow.com/questions/25046837/the-encodingstyle-attribute-is-not-allowed-in-spyne>
The main class to active soap services between xmla clients and olapy
"""
# IMPORTANT : all xsd and soap responses are written manually (not generated by Spyne lib)
# because Spyne doesn't support encodingStyle and other namespaces required by Excel,
# check it <http://stackoverflow.com/questions/25046837/the-encodingstyle-attribute-is-not-allowed-in-spyne>
# we have to instantiate XmlaDiscoverTools and declare variables
# as class variable so we can access them in Discovery and Execute functions
# this problem is related with Spyne architecture, NO CHOICE
we have to instantiate XmlaDiscoverTools and declare variables
as class variable so we can access them in Discovery and Execute functions
this problem is related with Spyne architecture, NO CHOICE
# NOTE : some variables and functions names shouldn't respect naming convention here
# because we need to create the xmla response (generated by spyne) with the same variable names,
# and then, xmla requests from excel can be reached
# thus make life easier
NOTE : some variables and functions names shouldn't respect naming convention here
because we need to create the xmla response (generated by spyne) with the same variable names,
and then, xmla requests from excel can be reached
thus make life easier.
"""
discover_tools = XmlaDiscoverTools()
sessio_id = discover_tools.session_id
......@@ -52,7 +49,7 @@ class XmlaProviderService(ServiceBase):
_throws=InvalidCredentialsError)
def Discover(ctx, request):
"""
the first principle function of xmla protocol
The first principal function of the XMLA protocol.
:param request: Discover must take exactly 3 arguments (RequestType,
Restrictions and Properties); we encapsulate them in a DiscoverRequest object
......@@ -62,7 +59,6 @@ class XmlaProviderService(ServiceBase):
"""
# ctx is the 'context' parameter used by Spyne
# (which cause problems when we want to access xmla_provider instantiation variables)
discover_tools = XmlaProviderService.discover_tools
ctx.out_header = Session(SessionId=str(XmlaProviderService.sessio_id))
......@@ -141,14 +137,13 @@ class XmlaProviderService(ServiceBase):
_out_header=Session)
def Execute(ctx, request):
"""
the second principle function of xmla protocol
The second principal function of the XMLA protocol.
:param request: Execute must take exactly 2 arguments (Command and Properties);
we encapsulate them in an ExecuteRequest object
:return: Execute response in xml format
"""
ctx.out_header = Session(SessionId=str(XmlaProviderService.sessio_id))
if request.Command.Statement == '':
......@@ -229,7 +224,7 @@ wsgi_application = WsgiApplication(application)
def start_server(write_on_file=False):
"""
start the xmla server
Start the xmla server.
:param write_on_file:
- False -> server logs will be displayed on console
......
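A hedged sketch of launching the provider, mirroring the main() wrapper shown near the top of this commit::

    from core.services.xmla import start_server

    # write_on_file=False keeps the logs on the server console, as documented above;
    # the WSGI host/port are defined elsewhere in xmla.py.
    start_server(write_on_file=False)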
......@@ -19,9 +19,7 @@ from .xmla_discover_xsds import (
# TODO clean
class XmlaDiscoverTools():
"""
XmlaDiscoverTools for generating xmla discover responses
"""
"""XmlaDiscoverTools for generating xmla discover responses."""
def __init__(self):
# right now the catalogue_name and cube name are the same
......@@ -36,12 +34,11 @@ class XmlaDiscoverTools():
def change_catalogue(self, new_catalogue):
"""
if you change the catalogue (cube) in any request, we have to instantiate the MdxEngine with the new catalogue
If you change the catalogue (cube) in any request, we have to instantiate the MdxEngine with the new catalogue.
:param new_catalogue: catalogue name
:return: new instance of MdxEngine with new star_schema_DataFrame and other variables
"""
#
if self.selected_catalogue != new_catalogue:
self.selected_catalogue = new_catalogue
self.executer = MdxEngine(new_catalogue)
......
......@@ -5,9 +5,7 @@ from collections import OrderedDict
class XmlaExecuteTools():
"""
XmlaExecuteTools for generating xmla execute responses
"""
"""XmlaExecuteTools for generating xmla execute responses."""
def __init__(self, executer):
self.executer = executer
......@@ -15,7 +13,7 @@ class XmlaExecuteTools():
@staticmethod
def split_dataframe(mdx_execution_result):
"""
Split DataFrame into multiple ones by dimension
Split DataFrame into multiple ones by dimension.
example::
......@@ -50,7 +48,6 @@ class XmlaExecuteTools():
:param mdx_execution_result: MdxEngine.execute_mdx() result
:return: dict with multiple DataFrame
"""
# TODO new version with facts as splited df maybe
return OrderedDict(
(key, mdx_execution_result['result'].reset_index()[list(value)])
......@@ -60,7 +57,7 @@ class XmlaExecuteTools():
@staticmethod
def get_tuple_without_nan(tuple):
"""
remove nan from tuple.
Remove nan from tuple.
example:
......@@ -84,6 +81,7 @@ class XmlaExecuteTools():
mdx_query_axis='all',
axis="Axis0"):
"""
:param mdx_execution_result:
:param splited_df:
:return:
......@@ -285,7 +283,7 @@ class XmlaExecuteTools():
# TODO maybe fusion with generate xs0 for less iteration
def generate_cell_data(self, mdx_execution_result):
"""
examle of CellData::
Example of CellData::
<Cell CellOrdinal="0">
<Value xsi:type="xsi:long">768</Value>
......@@ -338,7 +336,7 @@ class XmlaExecuteTools():
def generate_axes_info_slicer(self, mdx_execution_result):
"""
Not used dimensions
Generate AxisInfo for the dimensions not used in the query.
example AxisInfo::
......@@ -365,7 +363,6 @@ class XmlaExecuteTools():
:param mdx_execution_result: mdx_execute() result
:return: AxisInfo as string
"""
all_dimensions_names = self.executer.get_all_tables_names(
ignore_fact=True)
all_dimensions_names.append('Measures')
......@@ -413,7 +410,7 @@ class XmlaExecuteTools():
mdx_query_axis='columns',
Axis='Axis0'):
"""
example AxisInfo::
Example AxisInfo::
<AxesInfo>
......@@ -445,7 +442,6 @@ class XmlaExecuteTools():
:param Axis: Axis0 or Axis1 (Axis0 by default)
:return:
"""
hierarchy_info = ""
# measure must be written at the top
if self.executer.facts in mdx_execution_result['columns_desc'][mdx_query_axis].keys(
......@@ -488,10 +484,10 @@ class XmlaExecuteTools():
def generate_axes_info(self, mdx_execution_result):
"""
:param mdx_execution_result: mdx_execute() result
:return: AxisInfo as string
"""
if mdx_execution_result['columns_desc']['rows']:
return """
{0}
......@@ -520,7 +516,7 @@ class XmlaExecuteTools():
def generate_slicer_axis(self, mdx_execution_result):
"""
example SlicerAxis::
Example SlicerAxis::
<Axis name="SlicerAxis">
......
......@@ -9,19 +9,17 @@ from ..core.mdx.parser.parse import MdxParser
class QueryForm(Form):
"""
Query Form
"""
"""Query Form."""
mdx = TextAreaField(
"MDX Query",
validators=[DataRequired(message="Please enter the MDX Query")])
def validate(self):
"""
Valide
"""Valide.
:return:
"""
parser = MdxParser()
if self.mdx.data:
try:
......@@ -39,9 +37,8 @@ class QueryForm(Form):
class LoginForm(Form):
"""
Loging Form
"""
"""Loging Form."""
username = StringField(
'Your username:',
validators=[DataRequired(message="Please enter the Username")])
......
......@@ -7,9 +7,7 @@ from os.path import expanduser
class Logs:
"""
class responsible of managing logs (users , mdx and xmla logs)
"""
"""Class responsible of managing logs (users , mdx and xmla logs)."""
def __init__(self, file_name):
self.file_name = file_name + ".log"
......
......@@ -4,9 +4,7 @@ import six
class IFrame(object):
"""
Frame in which we can drag and drop our columns
"""
"""Frame in which we can drag and drop our columns."""
iframe = """
<iframe
......@@ -20,7 +18,7 @@ class IFrame(object):
def __init__(self, src, width, height, **kwargs):
"""
IFrame
IFrame.
:param src:
:param width:
:param height:
......@@ -32,7 +30,7 @@ class IFrame(object):
self.params = kwargs
def _repr_html_(self):
"""return the embed iframe"""
"""return the embed iframe."""
if self.params:
# try:
# from urllib.parse import urlencode # Py 3
......@@ -112,7 +110,7 @@ template = """
def pivot_ui(df, outfile_path="pivottablejs.html", width="100%", height="500"):
"""
Create pivot table html page relative to DataFrame
Create a pivot table HTML page from a DataFrame.
:param df: the DataFrame
:param outfile_path: html page name (can be the path also)
......
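A hedged usage sketch of pivot_ui(); the DataFrame content is illustrative::

    import pandas as pd

    df = pd.DataFrame({'Country': ['France', 'Spain'], 'Amount': [1023, 512]})
    # writes an html page embedding the drag-and-drop pivot table
    pivot_ui(df, outfile_path='pivottablejs.html', width='100%', height='500')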
......@@ -8,20 +8,17 @@ import plotly.graph_objs as go
class Graphs:
"""
Manage graphs for the web clients
"""
"""Manage graphs for the web clients."""
# TODO remove this (right now this is just a demo with the sales cube)
@staticmethod
def generate_graphes(dataframe):
"""
Generate graphs for a pandas DataFrame, if you want to add graphs, you have to do it in this function
Generate graphs for a pandas DataFrame; if you want to add graphs, do it in this function.
:param dataframe: the DataFrame
:return: dict of ids as keys and json graphs as values
"""
x = []
x_pie = []
y = []
......
......@@ -35,9 +35,7 @@ log_mdx = Logs('mdx')
class Nod:
"""
class for maintaining dimensions hierarchies
"""
"""Class for maintaining dimensions hierarchies."""
def __init__(self, text, id, parent):
self.text = text
......@@ -52,7 +50,7 @@ class Nod:
def generate_tree_levels():
"""
build table's levels to use them in the page's TreeView
Build the tables' levels for use in the page's TreeView.
:return: dict of levels
"""
......@@ -92,7 +90,7 @@ def generate_tree_levels():
@login_manager.user_loader
def load_user(userid):
"""
load user with specific id
Load the user with the given id.
:param userid: user id
:return: user
......