Commit 5ec05822
authored Apr 21, 2006 by jimw@mysql.com

Delete some more NDB cruft.

parent cdb5fada
Showing 15 changed files with 0 additions and 3563 deletions:

storage/ndb/home/bin/Linuxmkisofs                              +0  -0
storage/ndb/home/bin/Solarismkisofs                            +0  -0
storage/ndb/home/bin/cvs2cl.pl                                 +0  -1865
storage/ndb/home/bin/fix-cvs-root                              +0  -17
storage/ndb/home/bin/import-from-bk.sh                         +0  -158
storage/ndb/home/bin/ndb_deploy                                +0  -27
storage/ndb/home/bin/ndbdoxy.pl                                +0  -184
storage/ndb/home/bin/ngcalc                                    +0  -78
storage/ndb/home/bin/parseConfigFile.awk                       +0  -98
storage/ndb/home/bin/setup-test.sh                             +0  -272
storage/ndb/home/bin/signallog2html.lib/signallog2list.awk     +0  -102
storage/ndb/home/bin/signallog2html.lib/uniq_blocks.awk        +0  -29
storage/ndb/home/bin/signallog2html.sh                         +0  -349
storage/ndb/home/bin/stripcr                                   +0  -90
storage/ndb/home/lib/funcs.sh                                  +0  -294
storage/ndb/home/bin/Linuxmkisofs  deleted 100755 → 0

File deleted.
storage/ndb/home/bin/Solarismkisofs  deleted 100755 → 0

File deleted.
storage/ndb/home/bin/cvs2cl.pl  deleted 100755 → 0

Diff collapsed (1865 lines deleted, not shown).
storage/ndb/home/bin/fix-cvs-root  deleted 100755 → 0

#! /bin/sh
# change all CVS/Root to current CVSROOT

[ "$CVSROOT" ] || { echo "no CVSROOT in environment" >&2; exit 1; }

echo "changing all CVS/Root files under `pwd`"
sleep 1

find . -path '*/CVS/Root' -print |
while read file; do
    echo "$file"
    chmod +w $file || exit 1
    echo $CVSROOT >$file || exit 1
done

echo "done"
storage/ndb/home/bin/import-from-bk.sh  deleted 100755 → 0

#! /bin/sh

# XXX does not delete files
# XXX does not handle nested new dirs
# this script screams for perl, no time now
# look for bk2cvs on the net

PATH=/usr/local/bin:$PATH; export PATH
LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH; export LD_LIBRARY_PATH

batch=n
if [ "$1" = "-batch" ]; then
    batch=y
    shift
fi

say() {
    echo "$*"
}

die() {
    case $# in
    0)  set -- "command failed" ;;
    esac
    say "$* -- aborted" >&2
    exit 1
}

usage() {
    die "usage: $0 [-batch] top -- copy from mysql/ndb to another NDB_TOP"
}

doit() {
    cmd="$*"
    if [ $batch = n ]; then
        echo -n "$cmd [y]"
        read junk
        sh -c "$cmd"
        return 0
    else
        echo "$cmd"
        sh -c "$cmd"
        return $?
    fi
}

say "======================"
say "`date`"

case $# in
1)  [ -d $1/src/CVS ] || die "$1 is not an NDB_TOP"
    top=$1 ;;
*)  usage ;;
esac

if ! fgrep ndb_kernel_version.h $top/include/kernel/CVS/Entries >/dev/null 2>&1; then
    die "$top is not an NDB_TOP"
fi

if find $top -path '*/CVS/Tag' -print | grep . >/dev/null; then
    die "$top: contains CVS/Tag files, not accepted"
fi

if [ ! -f include/SCCS/s.ndb_version.h ]; then
    die "current dir ($PWD) is not an NDB_TOP"
fi

doit "bk pull" || exit 1
doit "bk -r clean"
doit "bk -r get -q"

files=`bk -r . sfiles -g | fgrep -v ' ' | fgrep -v /.cvsignore`

n=0
files2=
for f in $files; do
    if [ ! -f $f ]; then
        die "$f: no such file"
    fi
    if [ -w $f ]; then
        say "$f: is writable, accept anyway"
    fi
    files2="$files2 $f"
    n=$((n+1))
done
files=$files2
say "$n files..."

adddirs=
addfiles=
updfiles=
for f in $files; do
    d=`dirname $f`
    b=`basename $f`
    if [ ! -f $top/$d/CVS/Entries ]; then
        found=n
        for x in $adddirs; do
            if [ $x = $d ]; then found=y; break; fi
        done
        if [ $found = n ]; then
            say "$d: to create dir"
            adddirs="$adddirs $d"
        fi
        addfiles="$addfiles $f"
        say "$f: to create"
    elif ! fgrep "/$b/" $top/$d/CVS/Entries >/dev/null; then
        addfiles="$addfiles $f"
        say "$f: to create"
    else
        cmp $f $top/$f >/dev/null
        case $? in
        0)  continue ;;
        1)  ;;
        *)  die "$f: unknown error" ;;
        esac
        updfiles="$updfiles $f"
        say "$f: to update"
    fi
done

for d in $adddirs; do
    doit "cd $top && mkdir -p $d" || die
done

for f in $addfiles $updfiles; do
    doit "cp -fp $f $top/$f" || die
done

for d in $adddirs; do
    # fix 1 level up
    d2=`dirname $d`
    if [ ! -d $top/$d2/CVS ]; then
        doit "cd $top && cvs add $d2" || die
    fi
    doit "cd $top && cvs add $d" || die
done

for f in $addfiles; do
    kb=
    if echo $f | perl -nle "print(-B $_)" | grep 1 >/dev/null; then
        kb="-kb"
    fi
    doit "cd $top && cvs add $kb $f" || die
done

tag=import_bk_`date +%Y_%m_%d`

doit "cd $top && cvs commit -m $tag" || die
doit "cd $top && cvs tag -F $tag" || die

env="NDB_TOP=$top; export NDB_TOP"
env="$env; USER_FLAGS='-DAPI_TRACE -fmessage-length=0'; export USER_FLAGS"
doit "$env; cd $top && ./configure"
doit "$env; cd $top && sh config/GuessConfig.sh"
doit "$env; cd $top && make clean nuke-deps vim-tags"
doit "$env; cd $top && make" || die

say "imported ok"
storage/ndb/home/bin/ndb_deploy  deleted 100755 → 0

#!/bin/sh
if [ $# -eq 0 ]
then
  for i in $DEPLOY_DST
  do
    rsync -r -v --exclude '*.a' $NDB_TOP/bin $NDB_TOP/lib $i/
  done
else
  while [ $# -gt 0 ]
  do
    arg=$1
    shift;
    if [ `echo $arg | grep -c lib` -eq 0 ]
    then
        dst=bin/
    else
        dst=lib/
    fi
    for i in $DEPLOY_DST
    do
      rsync -v $arg $i/$dst
    done
  done
fi
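The script relies on the NDB_TOP and DEPLOY_DST environment variables it references; a small usage sketch follows, where the host names and paths are assumptions:

# Hypothetical setup; hosts and paths are illustrative.
export NDB_TOP=$HOME/ndb
export DEPLOY_DST="host1:/opt/ndb host2:/opt/ndb"
ndb_deploy                      # no args: rsync all of $NDB_TOP/bin and $NDB_TOP/lib to every target
ndb_deploy lib/libNDB_API.so    # args matching "lib" go to <target>/lib/, everything else to <target>/bin/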
storage/ndb/home/bin/ndbdoxy.pl  deleted 100755 → 0

#!/usr/local/bin/perl
#
# ndbdoxy.pl    Executes doxygen on a checked out version of NDB Cluster
#
# Written by Lars Thalmann, 2003.

use strict;
umask 000;

# -----------------------------------------------------------------------------
# Settings
# -----------------------------------------------------------------------------

my $root = "/home/elathal/public_html/cvsdoxy";

$ENV{LD_LIBRARY_PATH} = "/usr/local/lib:/opt/as/local/lib";
$ENV{LD_LIBRARY_PATH} = $ENV{LD_LIBRARY_PATH} . ":/opt/as/forte6/SUNWspro/lib";
$ENV{PATH} = $ENV{PATH} . ":/usr/local/bin:/opt/as/local/bin";
$ENV{PATH} = $ENV{PATH} . ":/opt/as/local/teTeX/bin/sparc-sun-solaris2.8";

my $DOXYGEN   = "doxygen";
my $PDFLATEX  = "pdflatex";
my $MAKEINDEX = "makeindex";

# -----------------------------------------------------------------------------
# Argument handling
# -----------------------------------------------------------------------------

if (@ARGV != 3) {
    print <<END;
Usage:
	ndbdoxy.pl <module> <title> <version>

	where
	<module> is cvsdoxy module to doxgenify
	<title> is title of report
	<version> is version of NDB Cluster
END
    exit;
}
my $module  = $ARGV[0];
my $title   = $ARGV[1];
my $version = $ARGV[2];
my $destdir = ".";

# -----------------------------------------------------------------------------
# Execute Doxygen -g
# -----------------------------------------------------------------------------

if (-r "${root}/doxyfiles/${module}.doxyfile") {
    system("cd ${destdir}; \
            cp ${root}/doxyfiles/${module}.doxyfile Doxyfile");
} elsif (-r "${root}/doxyfiles/default.doxyfile") {
    system("cd ${destdir}; \
            cp ${root}/doxyfiles/default.doxyfile Doxyfile");
} else {
    system("cd ${destdir}; $DOXYGEN -g");
}

# -----------------------------------------------------------------------------
# HTML Footer
# -----------------------------------------------------------------------------

if (-r "${root}/doxyfiles/htmlfooter") {
    system("cd ${destdir}; \
            cp ${root}/doxyfiles/htmlfooter footer.html");

    open (INFILE,  "< ${destdir}/footer.html")
	or die "Error opening ${destdir}/footer.html.\n";
    open (OUTFILE, "> ${destdir}/footer.html.new")
	or die "Error opening ${destdir}/footer.html.new.\n";
    while (<INFILE>) {
	if (/(.*)DATE(.*)$/) {
	    print OUTFILE $1 . localtime() . $2;
	} else {
	    print OUTFILE;
	}
    }
    close INFILE;
    close OUTFILE;

    system("mv ${destdir}/footer.html.new ${destdir}/footer.html");
} else {
    print("Warning: No ${root}/doxyfiles/${module}.htmlfooter");
}

# -----------------------------------------------------------------------------
# Execute Doxygen
# -----------------------------------------------------------------------------

system("cd ${destdir}; $DOXYGEN");

# -----------------------------------------------------------------------------
# Change a little in refman.tex
# -----------------------------------------------------------------------------

open (INFILE,  "< ${destdir}/latex/refman.tex")
    or die "Error opening ${destdir}/latex/refman.tex.\n";
open (OUTFILE, "> ${destdir}/latex/refman.tex.new")
    or die "Error opening ${destdir}/latex/refman.tex.new.\n";
while (<INFILE>) {
    if (/(.*)Reference Manual(.*)$/) {
	print OUTFILE $1 .
	    "\\mbox{}\\vspace{-3cm}\\mbox{}" .
	    "\\hrule\\bigskip\\bigskip\\bigskip\\bigskip" .
	    "\\Huge{" . $title . "}" . $2;
    } elsif (/(.*)Generated by Doxygen 1.2.1[0-9](.*)$/) {
	print OUTFILE $1 .
	    "\\begin{center}" .
	    "\\LARGE{MySQL AB}" .
	    "\\end{center}" .
	    "\\hfill\\bigskip\\bigskip\\bigskip\\hrule" .
	    "\\bigskip\\bigskip\\bigskip\\bigskip\\bigskip" .
	    "\\bigskip\\bigskip\\bigskip\\bigskip\\bigskip" .
	    "\\bigskip\\bigskip NDB Cluster Release " . $version .
	    "\\bigskip\\bigskip\\bigskip\\bigskip\\bigskip\\hfill " . $2;
    } elsif (/\\chapter\{File Index\}/) {
	print OUTFILE "\%\\chapter{File Index}\n";
    } elsif (/\\input{files}/) {
	print OUTFILE "\%\\input{files}\n";
    } elsif (/\\chapter\{Page Index\}/) {
	print OUTFILE "\%\\chapter{Page Index}\n";
    } elsif (/\\input{pages}/) {
	print OUTFILE "\%\\input{pages}\n";
    } else {
	print OUTFILE;
    }
}
close INFILE;
close OUTFILE;

system("mv ${destdir}/latex/refman.tex.new ${destdir}/latex/refman.tex");

# -----------------------------------------------------------------------------
# Change a little in doxygen.sty
# -----------------------------------------------------------------------------

open (INFILE,  "< ${destdir}/latex/doxygen.sty")
    or die "Error opening INFILE.\n";
open (OUTFILE, "> ${destdir}/latex/doxygen.sty.new")
    or die "Error opening OUTFILE.\n";
while (<INFILE>) {
    if (/\\rfoot/) {
	print OUTFILE "\\rfoot[\\fancyplain{}{\\bfseries\\small \\copyright~Copyright 2003 MySQL AB\\hfill support-cluster\@mysql.com}]{}\n";
    } elsif (/\\lfoot/) {
	print OUTFILE "\\lfoot[]{\\fancyplain{}{\\bfseries\\small support-cluster\@mysql.com\\hfill \\copyright~Copyright 2003 MySQL AB}}\n";
    } else {
	print OUTFILE;
    }
}
close INFILE;
close OUTFILE;

system("mv ${destdir}/latex/doxygen.sty.new ${destdir}/latex/doxygen.sty");

# -----------------------------------------------------------------------------
# Other
# -----------------------------------------------------------------------------

#system("cd ${root}/tmp/${module}; \
#	mkdir html.tar; \
#	cd html.tar; \
#	cp -r ../html ${module}; \
#	tar cf ${module}.html.tar ${module}; \
#	/usr/local/bin/gzip ${module}.html.tar; \
#	/bin/rm -rf ${root}/tmp/${module}/html.tar/${module}");

#system("cd ${destdir}/latex/; \
#	$PDFLATEX refman.tex \
#	$MAKEINDEX refman.idx \
#	$PDFLATEX refman.tex \
#	mv -f refman.pdf ${module}.pdf");

print <<END;
Execute:
  latex refman; makeindex refman; latex refman
END
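Going by the script's own usage text and its closing message, a run would look roughly like the sketch below; the module name, title and version are placeholders, and the final latex step is the manual follow-up the script asks for:

# Hypothetical invocation; the three arguments are placeholders.
ndbdoxy.pl ndbapi "NDB API Internals" 4.1.10
# then, as the script prints at the end (the latex output lands under ./latex):
cd latex && latex refman && makeindex refman && latex refman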
storage/ndb/home/bin/ngcalc  deleted 100755 → 0

#! /usr/local/bin/perl

use strict;
use Getopt::Long;

sub usage {
    print <<END;
ngcalc -- calculate node groups and table fragments
usage: ngcalc [ options ] f1 f2 ...
-g num      number of node groups (default 2)
-r num      number of replicas (default 2)
-n list     comma-separated list of db nodes (default 1,2,...)
fX          number of fragments per node group in table X (e.g. 1,2,8)
            (all replicas count as same fragment)
END
    exit(1);
};

use vars qw($cnoOfNodeGroups $cnoReplicas $nodeArray);

$cnoOfNodeGroups = 2;
$cnoReplicas = 2;
GetOptions(
    "g=i" => \$cnoOfNodeGroups,
    "r=i" => \$cnoReplicas,
    "n=s" => \$nodeArray,
) or &usage;
my @tableList = @ARGV;

$cnoOfNodeGroups > 0 or &usage;
$cnoReplicas > 0 or &usage;
if (! defined($nodeArray)) {
    $nodeArray = join(',', 1..($cnoOfNodeGroups * $cnoReplicas));
}
$nodeArray =~ /^\d+(,\d+)*$/ or &usage;
my @nodeArray = split(/,/, $nodeArray);
@nodeArray == $cnoOfNodeGroups * $cnoReplicas or &usage;

my @nodeGroupRecord;
for (my $i = 0; $i < $cnoOfNodeGroups; $i++) {
    my $rec = {};
    my $nodes = [];
    for (my $j = 0; $j < $cnoReplicas; $j++) {
	push(@$nodes, $nodeArray[$i * $cnoReplicas + $j]);
    }
    $rec->{nodesInGroup} = $nodes;
    $rec->{nodeCount} = $cnoReplicas;
    $rec->{nextReplicaNode} = 0;
    $nodeGroupRecord[$i] = $rec;
    print "NG $i: ", join(" ", @{$rec->{nodesInGroup}}), "\n";
}

# see Dbdih::execCREATE_FRAGMENTATION_REQ

my $c_nextNodeGroup = 0;
for (my $t = 0; $t < @tableList; $t++) {
    use integer;
    my $f = $tableList[$t];
    my $ng = $c_nextNodeGroup++;
    $c_nextNodeGroup = 0 if $c_nextNodeGroup == $cnoOfNodeGroups;
    my $noOfFragments = $f * $cnoOfNodeGroups;
    my @fragments;
    for (my $fragNo = 0; $fragNo < $noOfFragments; $fragNo++) {
	my $rec = $nodeGroupRecord[$ng];
	my $max = $rec->{nodeCount};
	my $ind = $rec->{nextReplicaNode};
	$rec->{nextReplicaNode} = ($ind + 1 >= $max ? 0 : $ind + 1);
	for (my $replicaNo = 0; $replicaNo < $cnoReplicas; $replicaNo++) {
	    my $nodeId = $rec->{nodesInGroup}[$ind++];
	    push(@fragments, $nodeId);
	    $ind = ($ind == $max ? 0 : $ind);
	}
	$ng++;
	$ng = ($ng == $cnoOfNodeGroups ? 0 : $ng);
    }
    printf "%02d %s\n", $t, join(" ", @fragments);
}
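A sketch of how ngcalc would be called, matching its usage text; the node list and per-table fragment counts are arbitrary example values:

# Hypothetical run: 2 node groups, 2 replicas, db nodes 1-4, and two tables with
# 1 and 2 fragments per node group. Output is one "NG <i>: ..." line per node
# group followed by one line per table listing the node id of each fragment replica.
ngcalc -g 2 -r 2 -n 1,2,3,4 1 2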
storage/ndb/home/bin/parseConfigFile.awk  deleted 100644 → 0

BEGIN{
    where=0;
    n_hosts=0;
    n_api=0;
    n_ndb=0;
    n_mgm=0;
    n_ports=0;
}

/COMPUTERS/ {
    where=1;
}
/\[[ \t]*COMPUTER[ \t]*\]/ {
    where=1;
}
/PROCESSES/ {
    where=2;
}
/Type: MGMT/ {
    if(where!=1){
	where=2;
	n_mgm++;
    }
}
/\[[ \t]*MGM[ \t]*\]/ {
    where=2;
    n_mgm++;
}
/Type: DB/ {
    if(where!=1){
	where=3;
	n_ndb++;
    }
}
/\[[ \t]*DB[ \t]*\]/ {
    where=3;
    n_ndb++;
}
/Type: API/ {
    if(where!=1){
	where=4;
	n_api++;
    }
}
/\[[ \t]*API[ \t]*\]/ {
    where=4;
    n_api++;
}
/HostName:/ {
    host_names[host_ids[n_hosts]]=$2;
}
/FileSystemPath:/ {
    if(where==3){
	ndb_fs[ndb_ids[n_ndb]]=$2;
    }
}
/Id:/ {
    if(where==1){
	n_hosts++;
	host_ids[n_hosts]=$2;
    }
    if(where==2){
	mgm_ids[n_mgm]=$2;
    }
    if(where==3){
	ndb_ids[n_ndb]=$2;
    }
    if(where==4){
	api_ids[n_api]=$2;
    }
}
/ExecuteOnComputer:/ {
    if(where==2){
	mgm_hosts[mgm_ids[n_mgm]]=host_names[$2];
    }
    if(where==3){
	ndb_hosts[ndb_ids[n_ndb]]=host_names[$2];
    }
    if(where==4){
	api_hosts[api_ids[n_api]]=host_names[$2];
    }
}
END {
    for(i=1; i<=n_mgm; i++){
	printf("mgm_%d=%s\n", mgm_ids[i], mgm_hosts[mgm_ids[i]]);
    }
    for(i=1; i<=n_ndb; i++){
	printf("ndb_%d=%s\n", ndb_ids[i], ndb_hosts[ndb_ids[i]]);
	printf("ndbfs_%d=%s\n", ndb_ids[i], ndb_fs[ndb_ids[i]]);
    }
    for(i=1; i<=n_api; i++){
	printf("api_%d=%s\n", api_ids[i], api_hosts[api_ids[i]]);
    }
    printf("mgm_nodes=%d\n", n_mgm);
    printf("ndb_nodes=%d\n", n_ndb);
    printf("api_nodes=%d\n", n_api);
}
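The awk script emits shell variable assignments (mgm_N, ndb_N, ndbfs_N, api_N plus the *_nodes counters), which setup-test.sh below turns back into shell variables by sourcing the output. A condensed sketch of that pattern, mirroring setup-test.sh:

# Turn the cluster config into shell variables, as setup-test.sh does.
awk -f $NDB_PROJ_HOME/bin/parseConfigFile.awk $init_config > /tmp/run-test.$$
. /tmp/run-test.$$        # now $mgm_1, $ndb_2, $ndbfs_2, $mgm_nodes, ... are set
rm -f /tmp/run-test.$$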
storage/ndb/home/bin/setup-test.sh  deleted 100755 → 0

#!/bin/sh
# NAME
#   run-test.sh - Run a test program
#
# SYNOPSIS
#   setup-test.sh [ -n <ndb dir>] [ -r <run dir>]
#
# DESCRIPTION
#   run a test
#
# OPTIONS
#
# EXAMPLES
#
# ENVIRONMENT
#   NDB_PROJ_HOME       Home dir for ndb
#
# FILES
#   $NDB_PROJ_HOME/lib/funcs.sh  shell script functions
#
# DIAGNOSTICTS
#
# VERSION
#   1.01
#
# AUTHOR
#   Jonas Oreland
#
#

progname=`basename $0`
synopsis="setup-test.sh [-x xterm] [ -n <ndb dir>] [ -r <run dir>]"

: ${NDB_PROJ_HOME:?}            # If undefined, exit with error message

: ${RUN_NDB_NODE_OPTIONS:=--}   # If undef, set to --. Keeps getopts happy.
                                # You may have to experiment a bit
                                # to get quoting right (if you need it).

. $NDB_PROJ_HOME/lib/funcs.sh   # Load some good stuff

# defaults for options related variables
#
verbose=yes
options=""
ndb_dir=$NDB_TOP
if [ -z "$ndb_dir" ]
then
    ndb_dir=`pwd`
fi
local_dir=`pwd`
own_host=`hostname`
uniq_id=$$.$$
_xterm=$XTERM
_rlogin="ssh -X"

# used if error when parsing the options environment variable
#
env_opterr="options environment variable: <<$options>>"

# Option parsing, for the options variable as well as the command line.
#
# We want to be able to set options in an environment variable,
# as well as on the command line. In order not to have to repeat
# the same getopts information twice, we loop two times over the
# getopts while loop. The first time, we process options from
# the options environment variable, the second time we process
# options from the command line.
#
# The things to change are the actual options and what they do.
#
#
for optstring in "$options"  ""        # 1. options variable  2. cmd line
do
    while getopts n:r:x: i $optstring  # optstring empty => no arg => cmd line
    do
        case $i in
        n)  ndb_dir=$OPTARG;;          # Ndb dir
        r)  run_dir=$OPTARG;;          # Run dir
        x)  _xterm=$OPTARG;;
        \?) syndie $env_opterr;;       # print synopsis and exit
        esac
    done

    [ -n "$optstring" ]  &&  OPTIND=1  # Reset for round 2, cmdline options

    env_opterr=                        # Round 2 should not use the value
done
shift `expr $OPTIND - 1`

# --- option parsing done ---

ndb_dir=`abspath $ndb_dir`
run_dir=`abspath $run_dir`

trace "Verifying arguments"

if [ ! -d $ndb_dir/bin ] || [ ! -d $ndb_dir/lib ]
then
    msg "Ndb home path seems incorrect either $ndb_dir/bin or $ndb_dir/lib not found"
    exit 1004
fi

ndb_bin=$ndb_dir/bin/ndb
mgm_bin=$ndb_dir/bin/mgmtsrvr
api_lib=$ndb_dir/lib/libNDB_API.so

if [ ! -x $ndb_bin ]
then
    msg "Ndb path seems incorrect ndb binary not found: $ndb_bin"
    exit 1004
fi

if [ ! -x $mgm_bin ]
then
    msg "Ndb path seems incorrect management server binary not found: $mgm_bin"
    exit 1004
fi

init_config=$run_dir/mgm.1/initconfig.txt
local_config=$run_dir/mgm.1/localcfg.txt

if [ ! -r $init_config ] || [ ! -r $local_config ]
then
    msg "Run path seems incorrect $init_config or $local_config not found"
    exit 1004
fi

trace "Parsing $init_config"
awk -f $NDB_PROJ_HOME/bin/parseConfigFile.awk $init_config > /tmp/run-test.$uniq_id
. /tmp/run-test.$uniq_id
cat /tmp/run-test.$uniq_id
rm -f /tmp/run-test.$uniq_id

trace "Parsing $local_config"
MgmPort=`grep -v "OwnProcessId" $local_config | cut -d " " -f 2`

trace "Verifying that mgm port is empty"
telnet $mgm_1 $MgmPort > /tmp/mgm_port.$uniq_id 2>&1 <<EOF
EOF
if [ 0 -lt `grep -c -i connected /tmp/mgm_port.$uniq_id` ]
then
    rm /tmp/mgm_port.$uniq_id
    msg "There is already something using port $mgm_1:$MgmPort"
    exit 1003
fi
rm /tmp/mgm_port.$uniq_id

fixhost(){
    if [ "$1" != localhost ]
    then
        echo $1
    else
        uname -n
    fi
}

do_xterm(){
    title=$1
    shift
    xterm -fg black -title "$title" -e $*
}

save_profile(){
    cp $HOME/.profile /tmp/.profile.$uniq_id
}

wait_restore_profile(){
    while [ -r /tmp/.profile.$uniq_id ]
    do
        sleep 1
    done
}

start_mgm(){
    trace "Starting Management server on: $mgm_1"
    save_profile
    mgm_1=`fixhost $mgm_1`
    (
        echo "PATH=$ndb_dir/bin:\$PATH"
        echo "LD_LIBRARY_PATH=$ndb_dir/lib:\$LD_LIBRARY_PATH"
        echo "export PATH LD_LIBRARY_PATH"
        echo "cd $run_dir/mgm.1"
        echo "ulimit -Sc unlimited"
        echo "mv /tmp/.profile.$uniq_id $HOME/.profile"
    ) >> $HOME/.profile
    do_xterm "Mmg on $mgm_1" ${_rlogin} $mgm_1 &
    wait_restore_profile
}

start_ndb_node(){
    node_id=$1
    dir=$run_dir/ndb.$1
    ndb_host=`eval echo "\$"ndb_$node_id`
    ndb_host=`fixhost $ndb_host`
    ndb_fs=`eval echo "\$"ndbfs_$node_id`
    trace "Starting Ndb node $node_id on $ndb_host"
    save_profile
    (
        echo "PATH=$ndb_dir/bin:\$PATH"
        echo "LD_LIBRARY_PATH=$ndb_dir/lib:\$LD_LIBRARY_PATH"
        echo "mkdir -p $ndb_fs"
        echo "export PATH LD_LIBRARY_PATH"
        echo "cd $dir"
        echo "ulimit -Sc unlimited"
        echo "mv /tmp/.profile.$uniq_id $HOME/.profile"
    ) >> $HOME/.profile
    do_xterm "Ndb: $node_id on $ndb_host" ${_rlogin} $ndb_host &
    wait_restore_profile
}

start_api_node(){
    node_id=$1
    dir=$run_dir/api.$1
    api_host=`eval echo "\$"api_$node_id`
    api_host=`fixhost $api_host`
    trace "Starting api node $node_id on $api_host"
    save_profile
    (
        echo "PATH=$ndb_dir/bin:\$PATH"
        echo "LD_LIBRARY_PATH=$ndb_dir/lib:\$LD_LIBRARY_PATH"
        echo "export PATH LD_LIBRARY_PATH NDB_PROJ_HOME"
        echo "cd $dir"
        echo "ulimit -Sc unlimited"
        echo "mv /tmp/.profile.$uniq_id $HOME/.profile"
    ) >> $HOME/.profile
    do_xterm "API: $node_id on $api_host" ${_rlogin} $api_host &
    wait_restore_profile
}

for_each_ndb_node(){
    i=1
    j=`expr $mgm_nodes + 1`
    while [ $i -le $ndb_nodes ]
    do
        $* $j
        j=`expr $j + 1`
        i=`expr $i + 1`
    done
}

for_each_api_node(){
    i=1
    j=`expr $mgm_nodes + $ndb_nodes + 1`
    while [ $i -le $api_nodes ]
    do
        $* $j
        j=`expr $j + 1`
        i=`expr $i + 1`
    done
}

start_mgm
for_each_ndb_node start_ndb_node
for_each_api_node start_api_node

exit 0
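Per the synopsis, a run needs NDB_PROJ_HOME set (for funcs.sh and parseConfigFile.awk) plus the -n and -r directories; the paths in this sketch are illustrative only:

# Hypothetical invocation; paths are illustrative.
export NDB_PROJ_HOME=$HOME/ndb-proj
setup-test.sh -n $NDB_TOP -r $HOME/runs/test1   # run dir must contain mgm.1/initconfig.txt and mgm.1/localcfg.txt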
storage/ndb/home/bin/signallog2html.lib/signallog2list.awk  deleted 100644 → 0

BEGIN {
    PRINT=0;
    SIGNAL_ARRAY[0]="";
    BLOCK_ID=0;
    SIGNAL_ID=-22;
}

{
    SIGNAL_ARRAY[SIGNAL_ID]=SIGNAL_ID;
}

/^---- Send ----- Signal ----------------/ {
    DIRECTION="S";
    SENDER="";
    SENDPROCESS="";
    RECEIVER="";
    RECPROCESS="";
    SIGNAL="";
    RECSIGID="?";
    SIGID="?";
    DELAY="N/A";
}

/^---- Send delay Signal/ {
    DIRECTION="SD";
    SENDER="";
    SENDPROCESS="";
    RECEIVER="";
    RECPROCESS="";
    SIGNAL="";
    RECSIGID="?";
    SIGID="?";
    DELAY=$5;
    LEN=length(DELAY);
    DELAY=substr(DELAY,2,LEN);
}

/^---- Received - Signal ----------------/ {
    DIRECTION="R";
    SENDER="";
    SENDPROCESS="";
    RECEIVER="";
    RECPROCESS="";
    SIGNAL="";
    RECSIGID="?";
    SIGID="?";
    DELAY="N/A";
}

/r.bn:/ {
    RECEIVER=$3;
    RECPROCESS=$5;
    if(DIRECTION == "R"){
	SIGNAL=$10;
	RECSIGID=$7;
    }
    else
	SIGNAL=$8;
}

/s.bn:/ {
    SENDER=$3;
    SIGID=$7;
    if(SIGID == SIGNAL_ARRAY[SIGID]){
	PRINT=1;
	if(DIRECTION == "R"){
	    SIGNAL_ARRAY[RECSIGID]=RECSIGID;
	};
    }
    SENDPROCESS=$5;

    LEN=length(RECEIVER);
    RECEIVER=substr(RECEIVER,2,LEN-3);
    if(BLOCK_ID == "ALL" || RECEIVER == BLOCK_ID){
	PRINT=1;
    }

    LEN=length(SENDER);
    SENDER=substr(SENDER,2,LEN-3);
    if(BLOCK_ID == "ALL" || SENDER == BLOCK_ID){
	PRINT=1;}

    LEN=length(SIGNAL);
    SIGNAL=substr(SIGNAL,2,LEN-2);

    LEN=length(SENDPROCESS);
    SENDPROCESS=substr(SENDPROCESS,1,LEN-1);

    LEN=length(RECPROCESS);
    RECPROCESS=substr(RECPROCESS,1,LEN-1);

    if(PRINT == 1){
	print DIRECTION" "SENDPROCESS" "SENDER" "RECPROCESS" "RECEIVER" "SIGNAL" "SIGID" "RECSIGID" "DELAY;
    }
    PRINT=0;
}
storage/ndb/home/bin/signallog2html.lib/uniq_blocks.awk  deleted 100644 → 0

BEGIN {
    NAMES[""]="";
    ORDER[0]="";
    NUM=0;
}

{
    if(NAMES[$2$3] != $2$3){
	NAMES[$2$3]=$2$3;
	ORDER[NUM]=$2$3;
	NUM++;
    }
    if(NAMES[$4$5] != $4$5){
	NAMES[$4$5]=$4$5;
	ORDER[NUM]=$4$5;
	NUM++;
    }
}

END {
    for(i=0; i<NUM; i++){
	LIST=ORDER[i]" "LIST;
    }
    print LIST;
}
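These two .lib awk scripts are chained exactly the way signallog2html.sh (below) does it: signallog2list.awk flattens the signal log into one line per signal, and uniq_blocks.awk derives the diagram columns from that list. A condensed sketch of that pipeline:

# As used by signallog2html.sh; SIGNAL_ID and BLOCK_ID select what to trace.
awk -f signallog2list.awk SIGNAL_ID=$signal_id BLOCK_ID=ALL $signal_log_file > $signal_log_file.list
columns=`awk -f uniq_blocks.awk $signal_log_file.list`   # unique "<node><block>" names, one per table column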
storage/ndb/home/bin/signallog2html.sh  deleted 100755 → 0

#!/bin/sh
# NAME
#   signallog2html.sh
#
# SYNOPSIS
#   signallog2html.sh [ -b <block_name | ALL> ] [ -s <signal_id> ] -f signal_log_file
#
# DESCRIPTION
#   Creates a signal sequence diagram in HTML format that can be
#   viewed from a web browser. The HTML file is created from a signal
#   log file and it contains a big table with jpeg files in every
#   table cell. Every row in the table is a signal. The block_name
#   could be one of the following: CMVMI MISSRA NDBFS NDBCNTR DBACC
#   DBDICT DBLQH DBDIH DBTC DBTUP QMGR ALL. The signal_id is a
#   number. If no block_name or signal_id is given the default
#   block_name "ALL" is used.
#
#
#
# OPTIONS
#
# EXAMPLES
#
#
# ENVIRONMENT
#   NDB_PROJ_HOME       Home dir for ndb
#
# FILES
#   $NDB_PROJ_HOME/lib/funcs.sh  General shell script functions.
#   uniq_blocks.awk              Creates a list of unique blocks
#                                in the signal_log_file.
#   signallog2list.awk           Creates a list file from the signal_log_file.
#   empty.JPG                    Jpeg file, must exist in the HTML file
#                                directory for viewing.
#   left_line.JPG
#   line.JPG
#   right_line.JPG
#   self_line.JPG
#
#
# SEE ALSO
#
# DIAGNOSTICTS
#
# VERSION
#   1.0
#
# DATE
#   011029
#
# AUTHOR
#   Jan Markborg
#

progname=`basename $0`
synopsis="signallog2html.sh [ -b <block_name | ALL> ] [ -s <signal_id> ] -f signal_log_file"

block_name=""
signal_id=""
verbose=yes
signal_log_file=""

: ${NDB_PROJ_HOME:?}             # If undefined, exit with error message

: ${NDB_LOCAL_BUILD_OPTIONS:=--} # If undef, set to --. Keeps getopts happy.
                                 # You may have to experiment a bit
                                 # to get quoting right (if you need it).

. $NDB_PROJ_HOME/lib/funcs.sh    # Load some good stuff

# defaults for options related variables
#
report_date=`date '+%Y-%m-%d'`

# Option parsing for the the command line.
#

while getopts f:b:s: i
do
    case $i in
    f)  signal_log_file=$OPTARG;;
    b)  block_name=$OPTARG;;
    s)  signal_id=$OPTARG;;
    \?) syndie ;;               # print synopsis and exit
    esac
done

# -- Verify
trace "Verifying signal_log_file $signal_log_file"

if [ x$signal_log_file = "x" ]
then
    syndie "Invalid signal_log_file name: $signal_log_file not found"
fi

if [ ! -r $signal_log_file ]
then
    syndie "Invalid signal_log_file name: $signal_log_file not found"
fi

if [ blocknameSET = 1 ]
then
    trace "Verifying block_name"
    case $block_name in
    CMVMI| MISSRA| NDBFS| NDBCNTR| DBACC| DBDICT| DBLQH| DBDIH| DBTC| DBTUP| QMGR);;
    ALL)
        trace "Signals to/from every block will be traced!";;
    *)
        syndie "Unknown block name: $block_name";;
    esac
fi

if [ block_name = "" -a signal_id = "" ]
then
    block_name=ALL
    trace "block_name = $block_name"
fi

trace "Arguments OK"

###
#
# General html functions
header(){
    cat <<EOF
<html><head><title>$*</title></head>
<body>
EOF
}

footer(){
    cat <<EOF
</body></html>
EOF
}

heading(){
    h=$1; shift
    cat <<EOF
<h$h>$*</h$h>
EOF
}

table(){
    echo "<table $*>"
}

table_header(){
    echo "<th>$*</th>"
}

end_table(){
    echo "</table>"
}

row(){
    echo "<tr>"
}

end_row(){
    echo "</tr>"
}

c_column(){
    cat <<EOF
<td valign=center align=center>$*</td>
EOF
}

bold(){
    cat <<EOF
<b>$*</b>
EOF
}

column(){
    cat <<EOF
<td align=left>$*</td>
EOF
}

para(){
    cat <<EOF
<p></p>
EOF
}

hr(){
    cat <<EOF
<hr>
EOF
}

img_column(){
    cat <<EOF
<td><center><$* height=100% width=100%></center></td>
EOF
}

# Check the direction of arrow.
# arrowDirection(){ $columnarray $sendnode$sendblock $recnode$recblock
arrowDirection(){
    if [ $2 = $3 ]
    then
        arrow=SELF
        return;
    else
        for x in $1
        do
            if [ $x = $2 ]
            then
                arrow=RIGHT
                break
            elif [ $x = $3 ]
            then
                arrow=LEFT
                break
            fi
        done
    fi
}

drawImages(){
    for x in $columnarray
    do
        case $arrow in
        SELF)
            if [ $x = $sendnode$sendblock ]
            then
                img_column img SRC=\"self_line.JPG\"
            else
                img_column img SRC=\"empty.JPG\"
            fi;;
        RIGHT)
            if [ $x = $recnode$recblock ]
            then
                img_column img SRC=\"right_line.JPG\"
                weHavePassedRec=1
            elif [ $x = $sendnode$sendblock ]
            then
                img_column img SRC=\"empty.JPG\"
                weHavePassedSen=1
            elif [ $weHavePassedRec = 1 -o $weHavePassedSen = 0 ]
            then
                img_column img SRC=\"empty.JPG\"
            elif [ $weHavePassedRec = 0 -a $weHavePassedSen = 1 ]
            then
                img_column img SRC=\"line.JPG\"
            fi;;
        LEFT)
            if [ $x = $recnode$recblock ]
            then
                img_column img SRC=\"empty.JPG\"
                weHaveJustPassedRec=1
                weHavePassedRec=1
                continue
            fi
            if [ $x = $sendnode$sendblock -a $weHaveJustPassedRec = 1 ]
            then
                img_column img SRC=\"left_line.JPG\"
                weHaveJustPassedRec=0
                weHavePassedSen=1
                continue
            fi
            if [ $x = $sendnode$sendblock ]
            then
                img_column img SRC=\"line.JPG\"
                weHavePassedSen=1
                continue
            fi
            if [ $weHaveJustPassedRec = 1 ]
            then
                img_column img SRC=\"left_line.JPG\"
                weHaveJustPassedRec=0
                continue
            fi
            if [ $weHavePassedSen = 1 -o $weHavePassedRec = 0 ]
            then
                img_column img SRC=\"empty.JPG\"
                continue
            fi
            if [ $weHavePassedRec = 1 -a $weHavePassedSen = 0 ]
            then
                img_column img SRC=\"line.JPG\"
                continue
            fi
            column ERROR;;
        *)
            echo ERROR;;
        esac
    done
    column $signal
}

### Main
trace "Making HTML file"
(
    header "Signal sequence diagram $report_date"
    heading 1 "Signal sequence diagram $report_date"

    trace "Making list file"
    #make a signal list file from the signal log file.
    `awk -f /home/ndb/bin/signallog2html.lib/signallog2list.awk SIGNAL_ID=$signal_id BLOCK_ID=$block_name $signal_log_file > $signal_log_file.list`

    COLUMNS=`awk -f /home/ndb/bin/signallog2html.lib/uniq_blocks.awk $signal_log_file.list | wc -w`

    table "border=0 cellspacing=0 cellpadding=0 cols=`expr $COLUMNS + 1`"

    columnarray=`awk -f /home/ndb/bin/signallog2html.lib/uniq_blocks.awk $signal_log_file.list`

    row
    column #make an empty first column!
    for col in $columnarray
    do
        table_header $col
    done

    grep "" $signal_log_file.list | \
    while read direction sendnode sendblock recnode recblock signal sigid recsigid delay
    do
        if [ $direction = "R" ]
        then
            row
            weHavePassedRec=0
            weHavePassedSen=0
            weHaveJustPassedRec=0
            arrow=""

            # calculate the direction of the arrow.
            arrowDirection "$columnarray" "$sendnode$sendblock" "$recnode$recblock"

            # Draw the arrow images.
            drawImages
            end_row
        fi
    done
    end_table
    footer
) > $signal_log_file.html

exit 0
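A sketch of an invocation based on the synopsis; the block name and log file name below are examples, not values from the commit:

# Hypothetical run; DBTC and signal.log are example values.
signallog2html.sh -b DBTC -f signal.log   # writes signal.log.list and the diagram signal.log.html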
storage/ndb/home/bin/stripcr  deleted 100755 → 0

#!/bin/sh
# NAME
#   stripcr - a program for removing carriage return chars from dos-files.
#
# SYNOPSIS
#   stripcr [file...]
#
# DESCRIPTION
#   stripcr deletes all CR characters from the given files.
#   The files are edited in place.
#   If no files are given, stdin and stdout are used instead.
#
# OPTIONS
#   -s extension   Make a copy of the original of each file, and
#                  give it the given extension (.bak, .orig, -bak, ...).
#
# EXAMPLES
#   stripcr file.txt innerloop.cc
#   stripcr -i.bak *.cc
#
# ENVIRONMENT
#   NDB_PROJ_HOME   Home dir for ndb
#
# FILES
#   $NDB_PROJ_HOME/lib/funcs.sh   Some userful functions for safe execution
#                                 of commands, printing, and tracing.
#
# VERSION
#   1.0
#
# AUTHOR
#   Jonas Mölsä
#

progname=`basename $0`
synopsis="stripcr [-s extension] [file...]"

: ${NDB_PROJ_HOME:?}            # If undefined, exit with error message

: ${STRIPCR_OPTIONS:=--}        # If undefined, set to --, to keep getopts happy.
                                # You may have to experiment, to get quoting right.

. $NDB_PROJ_HOME/lib/funcs.sh

# defaults for options related variables
#
extension=
options="$STRIPCR_OPTIONS"

# used if error when parsing the options environment variable
#
env_opterr="options environment variable: <<$options>>"

# We want to be able to set options in an environment variable,
# as well as on the command line. In order not to have to repeat
# the same getopts information twice, we loop two times over the
# getopts while loop. The first time, we process options from
# the options environment variable, the second time we process
# options from the command line.
#
# The things to change are the actual options and what they do.
#
#
for optstring in "$options"  ""     # 1. options variable  2. cmd line
do
    while getopts s: i $optstring   # optstring empty => no arg => cmd line
    do
        case $i in
        s)  extension="$OPTARG";;
        \?) syndie $env_opterr;;    # print synopsis and exit
        esac
    done

    [ -n "$optstring" ]  &&  OPTIND=1  # Reset for round 2, cmd line options

    env_opterr=                        # Round 2 should not use the value
done
shift `expr $OPTIND - 1`

safe perl -i$extension -lpe 'tr/\r//d' $*
storage/ndb/home/lib/funcs.sh  deleted 100644 → 0

# NAME
#   safe, safe_eval, die, rawdie, syndie, msg, errmsg,
#   rawmsg, rawerrmsg, trace, errtrace, is_wordmatch
#   - functions for safe execution and convenient printing and tracing
#
#   abspath - make a path absolute
#
# SYNOPSIS
#   . funcs.sh
#
#   is_wordmatch requires perl.
#
# DESCRIPTION
#   Funcs.sh is a collection of somewhat related functions.
#   The main categories and their respective functions are:
#   Controlled execution   - safe, safe_eval
#   Exiting with a message - die, rawdie, syndie
#   Printing messages      - msg, errmsg, rawmsg, rawerrmsg
#   Tracing                - trace, errtrace
#   Pattern matching       - is_wordmatch
#
#
# ENVIRONMENT
#   These variables are not exported, but they are still visible
#   to, and used by, these functions.
#
#   progname    basename of $0
#   verbose     empty or non-emtpy, used for tracing
#   synopsis    string describing the syntax of $progname
#
# VERSION
#   2.0
#
# AUTHOR
#   Jonas Mvlsd
#   Jonas Oreland - added abspath

# Safely executes the given command and exits
# with the given commands exit code if != 0,
# else the return value ("the functions exit
# code") is 0.  Eg: safely cd $install_dir
#
safely ()
{
    "$@"
    safely_code__=$?
    [ $safely_code__ -ne 0 ]  &&
        { errmsg "Command failed: $@. Exit code: $safely_code__." ; exit $safely_code__ ; }
    :   # return "exit code" 0 from function
}

# Safely_eval executes "eval command" and exits
# with the given commands exit code if != 0,
# else the return value (the functions "exit
# code") is 0.
#
# Safely_eval is just like like safely, but safely_eval does
# "eval command" instead of just "command"
#
# Safely_eval even works with pipes etc., but you have to quote
# the special characters.  Eg: safely_eval ls \| wc \> tst.txt 2\>\&1
#
#
safely_eval ()
{
    eval "$@"
    safely_eval_code__=$?
    [ $safely_eval_code__ -ne 0 ]  &&
        { errmsg "Command failed: $@. Exit code: $safely_eval_code__." ; exit $safely_eval_code__ ; }
    :   # return "exit code" 0 from function
}

#
# safe and safe_eval are deprecated, use safely and safely_eval instead
#
# Safe executes the given command and exits
# with the given commands exit code if != 0,
# else the return value ("the functions exit
# code") is 0.
#
safe ()
{
    "$@"
    safe_code__=$?
    [ $safe_code__ -ne 0 ]  &&
        { errmsg "Command failed: $@. Exit code: $safe_code__." ; exit $safe_code__ ; }
    :   # return "exit code" 0 from function
}

# Safe_eval executes "eval command" and exits
# with the given commands exit code if != 0,
# else the return value (the functions "exit
# code") is 0.
#
# Safe_eval is just like like safe, but safe_eval does
# "eval command" instead of just "command"
#
# Safe_eval even works with pipes etc., but you have to quote
# the special characters.  Eg: safe_eval ls \| wc \> tst.txt 2\>\&1
#
#
safe_eval ()
{
    eval "$@"
    safe_eval_code__=$?
    [ $safe_eval_code__ -ne 0 ]  &&
        { errmsg "Command failed: $@. Exit code: $safe_eval_code__." ; exit $safe_eval_code__ ; }
    :   # return "exit code" 0 from function
}

# die prints the supplied message to stderr,
# prefixed with the program name, and exits
# with the exit code given by "-e num" or
# 1, if no -e option is present.
#
die ()
{
    die_code__=1
    [ "X$1" = X-e ]  &&  { die_code__=$2 ; shift 2 ; }
    [ "X$1" = X-- ]  &&  shift
    errmsg "$@"
    exit $die_code__
}

# rawdie prints the supplied message to stderr.
# It then exits with the exit code given with "-e num"
# or 1, if no -e option is present.
#
rawdie ()
{
    rawdie_code__=1
    [ "X$1" = X-e ]  &&  { rawdie_code__=$2 ; shift 2 ; }
    [ "X$1" = X-- ]  &&  shift
    rawerrmsg "$@"
    exit $rawdie_code__
}

# Syndie prints the supplied message (if present) to stderr,
# prefixed with the program name, on the first line.
# On the second line, it prints $synopsis.
# It then exits with the exit code given with "-e num"
# or 1, if no -e option is present.
#
syndie ()
{
    syndie_code__=1
    [ "X$1" = X-e ]  &&  { syndie_code__=$2 ; shift 2 ; }
    [ "X$1" = X-- ]  &&  shift
    [ -n "$*" ]  &&  msg "$*"
    rawdie -e $syndie_code__ "Synopsis: $synopsis"
}

# msg prints the supplied message to stdout,
# prefixed with the program name.
#
msg ()
{
    echo "${progname:-<no program name set>}:" "$@"
}

# msg prints the supplied message to stderr,
# prefixed with the program name.
#
errmsg ()
{
    echo "${progname:-<no program name set>}:" "$@" >&2
}

rawmsg () { echo "$*" ; }          # print the supplied message to stdout
rawerrmsg () { echo "$*" >&2 ; }   # print the supplied message to stderr

# trace prints the supplied message to stdout if verbose is non-null
#
trace ()
{
    [ -n "$verbose" ]  &&  msg "$@"
}

# errtrace prints the supplied message to stderr if verbose is non-null
#
errtrace ()
{
    [ -n "$verbose" ]  &&  msg "$@" >&2
}

# SYNTAX
#   is_wordmatch candidatelist wordlist
#
# DESCRIPTION
#   is_wordmatch returns true if any of the words (candidates)
#   in candidatelist is present in wordlist, otherwise it
#   returns false.
#
# EXAMPLES
#   is_wordmatch "tuareg nixdorf low content" "xx yy zz low fgj turn roff sd"
#   returns true, since "low" in candidatelist is present in wordlist.
#
#   is_wordmatch "tuareg nixdorf low content" "xx yy zz slow fgj turn roff sd"
#   returns false, since none of the words in candidatelist occurs in wordlist.
#
#   is_wordmatch "tuareg nixdorf low content" "xx yy zz low fgj tuareg roff"
#   returns true, since "low" and "tuareg" in candidatelist occurs in wordlist.
#
is_wordmatch ()
{
    is_wordmatch_pattern__=`echo $1 |
                            sed 's/^/\\\\b/;
                                 s/[ ][ ]*/\\\\b|\\\\b/g;
                                 s/$/\\\\b/;'`
    shift
    echo "$*" | perl -lne "m/$is_wordmatch_pattern__/ || exit 1"
}

#
# abspath
#
# Stolen from http://oase-shareware.org/shell/shelltips/script_programmer.html
#
abspath ()
{
    __abspath_D=`dirname "$1"`
    __abspath_B=`basename "$1"`
    echo "`cd \"$__abspath_D\" 2>/dev/null && pwd || echo \"$__abspath_D\"`/$__abspath_B"
}

#
#
# NdbExit
#
#
NdbExit ()
{
    echo "NdbExit: $1"
    exit $1
}

NdbGetExitCode ()
{
    __res__=`echo $* | awk '{if($1=="NdbExit:") print $2;}'`
    if [ -n $__res__ ]
    then
        echo $__res__
    else
        echo 255
    fi
}
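A minimal sketch of how the other scripts in this directory consume funcs.sh, assuming NDB_PROJ_HOME points at the tree containing lib/funcs.sh; the tool name and synopsis string are hypothetical:

# Typical prologue, modelled on setup-test.sh and stripcr above.
progname=`basename $0`
synopsis="mytool [-v] file..."      # hypothetical; syndie prints this on usage errors
: ${NDB_PROJ_HOME:?}                # bail out early if the environment is not set
. $NDB_PROJ_HOME/lib/funcs.sh

verbose=yes
trace "starting"                    # printed only because verbose is non-empty
safely cd /tmp                      # exits with cd's status if the command fails
is_wordmatch "low tuareg" "xx yy low" && msg "matched"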