code (stringlengths 2–1.05M) | repo_name (stringlengths 5–110) | path (stringlengths 3–922) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 2–1.05M)
---|---|---|---|---|---
#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
packer build -var-file="${DIR}/packer-vars.json" "${DIR}/packer.json"
| datapipe/generator-bakery | generators/cm-puppet/templates/build.sh | Shell | isc | 140 |
#!/bin/sh
#
# Verifies that go code passes go fmt, go vet, golint, and go test.
#
lintignore=golintignore
o=$(mktemp)
fail() {
echo Failed
cat $o
exit 1
}
echo Formatting
gofmt -l $(find . -name '*.go') > $o 2>&1
test $(wc -l $o | awk '{ print $1 }') = "0" || fail
echo Vetting
go vet ./... > $o 2>&1 || fail
echo Linting
if [ ! -e $lintignore ]; then
touch $lintignore
fi
t=$(mktemp)
golint . > $t 2>&1
diff $lintignore $t > $o 2>&1 || fail
echo Testing
go test ./... > $o 2>&1 || fail
| velour/ui | gok.sh | Shell | mit | 502 |
#!/bin/bash
cp index.html /var/www/
cp -r js /var/www
cp -r img /var/www/img
| IgorPelevanyuk/CatClicker-Knockout | deploy.sh | Shell | mit | 77 |
#!/bin/bash
# Set up your StartSSL certificates/keys for nginx-proxy
# This script expects your certificate and key files in this folder following
# the nginx-proxy naming convention.
# For example: foo.example.com.crt foo.example.com.key
# are the .crt and .key file for the domain foo.example.com
# Make sure the script is run from the correct directory
if [[ ! -e script.sh ]]; then
if [[ -d certs ]]; then
cd certs || { echo >&2 "Bundle directory exists but I can't cd there."; exit 1; }
else
echo >&2 "Please cd into the bundle before running this script."; exit 1;
fi
fi
CERT_CLASS="class1"
CERT_CA_FILE="sub.${CERT_CLASS}.server.ca.pem"
DHPARAM_FILE="dhparam.pem"
# Get the StartSSL Root CA and Class 1 Intermediate Server CA certificates
if [ ! -f ${CERT_CA_FILE} ]; then
wget https://www.startssl.com/certs/${CERT_CA_FILE}
fi
# Generate dhparam.pem if needed.
if [ ! -f ${DHPARAM_FILE} ]; then
echo "${DHPARAM_FILE} not found."
echo "Generating ${DHPARAM_FILE} with openssl"
openssl dhparam -out ${DHPARAM_FILE} 2048
fi
# For each domain key, build the unified certificate and copy the dhparam and key files.
for file in *.key; do
DOMAIN=${file%.*}
if [ ! -f ./unified/${DOMAIN}.crt ]; then
echo "DHPARAM: Copying ${DOMAIN}.${DHPARAM_FILE}"
cp ./${DHPARAM_FILE} ./unified/${DOMAIN}.${DHPARAM_FILE}
echo "CRT: Creating unified ${DOMAIN}.crt"
cat ./${DOMAIN}.crt ${CERT_CA_FILE} > ./unified/${DOMAIN}.crt
# Keys should already be decrypted
echo "KEY: Copying ${DOMAIN}.key"
cp ./${DOMAIN}.key ./unified/${DOMAIN}.key
echo ""
fi
# Protect your key files from prying eyes
chmod 600 ./${DOMAIN}.key
chmod 600 ./unified/${DOMAIN}.key
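# Optional sanity check (not in the original workflow; depending on your
# openssl version it may also require the root CA in the chain):
# openssl verify -CAfile ${CERT_CA_FILE} ./unified/${DOMAIN}.crt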
done
| daveenguyen/propitious-melting-pot | certs/script.sh | Shell | mit | 1,712 |
# (c) Liviu Balan <[email protected]>
# http://www.liviubalan.com/
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
LIV_TUT_META_URL='http://www.liviubalan.com/git-log-command'
| liviubalan/liviubalan.com-vagrant-ubuntu | provision-shell/tutorials/000/069/meta.sh | Shell | mit | 260 |
#!/usr/bin/bash
# Hard variables
# Directory containing Snakemake and cluster.json files
snakefile_dir='/nas/longleaf/home/sfrenk/pipelines/snakemake'
usage="\nCreate directory with Snakemake files required for pipeline \n\n setup_dir -p <pipeline> -d <directory> \n\n pipelines: bowtie_srna, hisat2_rna, srna_telo\n\n"
pipeline=""
if [ -z "$1" ]; then
printf "$usage"
exit
fi
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-p|--pipeline)
pipeline="$2"
shift
;;
-d|--dir)
dir="$2"
shift
;;
-h|--help)
printf "$usage"
exit
;;
esac
shift
done
if [[ ! -d $dir ]]; then
echo "ERROR: Invalid directory"
exit 1
fi
if [[ $pipeline == "" ]]; then
echo "ERROR: Please select pipeline: bowtie_srna or hisat2_rna"
exit 1
fi
# Determine pipeline file
case $pipeline in
"bowtie_srna"|"bowtie_sRNA")
snakefile="bowtie_srna.Snakefile"
;;
"hisat2_rna"|"hisat2_RNA")
snakefile='hisat2_rna.Snakefile'
;;
"srna_telo")
snakefile="srna_telo.Snakefile"
;;
*)
echo "ERROR: Invalid pipeline. Please select one of the following: bowtie_srna, hisat2_rna or srna_telo"
exit 1
;;
esac
# Copy over the snakefile
cp ${snakefile_dir}/${snakefile} ./${snakefile}
# Edit base directory in Snakefile
# Remove trailing "/" from dir if it's there
input_dir="$(echo $dir |sed -r 's/\/$//')"
input_dir=\"${input_dir}\"
sed -i -e "s|^BASEDIR.*|BASEDIR = ${input_dir}|" $snakefile
# Determine file extension
extension="$(ls $dir | grep -Eo "\.[^/]+(\.gz)?$" | sort | uniq)"
# Check if there are multiple file extensions in the same directory
ext_count="$(ls $dir | grep -Eo "\.[^/]+(\.gz)?$" | sort | uniq | wc -l)"
if [[ $ext_count == 0 ]]; then
echo "ERROR: Directory is empty!"
exit 1
elif [[ $ext_count != 1 ]]; then
echo "WARNING: Multiple file extensions found: using .fastq.gz"
extension=".fastq.gz"
fi
# Edit extension and utils_dir in Snakefile
extension="\"${extension}\""
sed -i -e "s|^EXTENSION.*|EXTENSION = ${extension}|g" $snakefile
utils_dir="${snakefile_dir%/snakemake}/utils"
utils_dir="\"${utils_dir}\""
sed -i -e "s|^UTILS_DIR.*|UTILS_DIR = ${utils_dir}|g" $snakefile
# Create Snakemake command script
printf "#!/usr/bin/bash\n" > "run_snakemake.sh"
printf "#SBATCH -t 2-0\n\n" >> "run_snakemake.sh"
printf "module add python\n\n" >> "run_snakemake.sh"
printf "snakemake -s $snakefile --keep-going --rerun-incomplete --cluster-config ${snakefile_dir}/cluster.json -j 100 --cluster \"sbatch -n {cluster.n} -N {cluster.N} -t {cluster.time}\"\n" >> run_snakemake.sh
| sfrenk/rna-seq_pipelines | utils/setup_dir.sh | Shell | mit | 2,550 |
#!/usr/bin/env bash
GEM_BIN=$1/ruby/bin/gem
export GEM_HOME=/usr/local/kidsruby/ruby/lib/ruby/gems/1.9.1
install_gems() {
echo $KIDSRUBY_INSTALLING_GEMS
${GEM_BIN} install htmlentities-4.3.0.gem --no-ri --no-rdoc 2>&1
${GEM_BIN} install rubywarrior-i18n-0.0.3.gem --no-ri --no-rdoc 2>&1
${GEM_BIN} install serialport-1.1.1-universal.x86_64-darwin-10.gem --no-ri --no-rdoc 2>&1
${GEM_BIN} install hybridgroup-sphero-1.0.1.gem --no-ri --no-rdoc 2>&1
}
install_qtbindings() {
echo $KIDSRUBY_INSTALLING_QTBINDINGS
${GEM_BIN} install qtbindings-4.7.3-universal-darwin-10.gem --no-ri --no-rdoc 2>&1
}
install_gosu() {
echo $KIDSRUBY_INSTALLING_GOSU
${GEM_BIN} install gosu-0.7.36.2-universal-darwin.gem --no-ri --no-rdoc 2>&1
}
install_gems
install_qtbindings
install_gosu
| hybridgroup/kidsrubyinstaller-osx | install_gems.sh | Shell | mit | 784 |
#!/bin/bash
DATESTR=$(date +"%Y%m%d%H%M%S")
LOCAL_BACKUP_DIR=/home/rails/db_backups/dsi
function fetch_ntriples() {
FILE_NAME="$LOCAL_BACKUP_DIR/$1"
GRAPH_URI=$2
SOURCE_URI=http://46.4.78.148/dsi/data?graph=$GRAPH_URI
curl -s -H "Accept:text/plain" -f -o $FILE_NAME $SOURCE_URI
CURL_STATUS=$?
if [ $CURL_STATUS -ne 0 ]; then
echo "Failed to fetch URL with curl: $SOURCE_URI"
echo "Backup Failed to Complete."
exit 1
fi
gzip $FILE_NAME
echo "Downloaded & Gzipped triples to $FILE_NAME"
}
function upload_to_s3() {
FNAME=$1
FILE_NAME="$LOCAL_BACKUP_DIR/$FNAME.gz"
s3cmd put -P $FILE_NAME s3://digitalsocial-dumps
S3_STATUS=$?
if [ $S3_STATUS -ne 0 ]; then
echo "Failed to put backup on S3"
echo "Backup Failed to Complete."
exit 2
fi
echo "Copied $FILE_NAME to S3"
}
# For backup purposes
function set_modified_date() {
query=`printf 'WITH <http://data.digitalsocial.eu/graph/organizations-and-activities/metadata>
DELETE {?ds <http://purl.org/dc/terms/modified> ?mod}
INSERT {?ds <http://purl.org/dc/terms/modified> "%s"^^<http://www.w3.org/2001/XMLSchema#dateTime>}
WHERE { GRAPH <http://data.digitalsocial.eu/graph/organizations-and-activities/metadata> { ?ds a <http://publishmydata.com/def/dataset#Dataset> .
OPTIONAL {?ds <http://purl.org/dc/terms/modified> ?mod} } }' $DATESTR`
curl -s -f -d "request=$query" http://46.4.78.148/dsi/update > /dev/null
CURL_STATUS=$?
if [ $CURL_STATUS -ne 0 ]; then
echo "Failed to update modified date"
echo "Backup Failed to Complete."
exit 3
fi
echo "Modification Date Set"
}
function remove_dsi_backup() {
FNAME=$1
rm $FNAME
echo "Removed old local backup: $FNAME"
}
export -f remove_dsi_backup # export the function so we can use it with find
function remove_old_backups() {
# NOTE the crazy syntax for calling an exported function and
# passing an arg to find -exec.
find $LOCAL_BACKUP_DIR -mtime +14 -exec bash -c 'remove_dsi_backup "$0"' {} \;
}
MAIN_DATA_SET="dataset_data_organizations-and-activities_$DATESTR.nt"
ACTIVITY_TYPES="concept-scheme_activity_types_$DATESTR.nt"
ACTIVITY_TECHNOLOGY_METHODS="concept-scheme_activity-technology-methods_$DATESTR.nt"
AREA_OF_SOCIETY="concept-scheme_area-of-society_$DATESTR.nt"
fetch_ntriples $MAIN_DATA_SET "http%3A%2F%2Fdata.digitalsocial.eu%2Fgraph%2Forganizations-and-activities"
fetch_ntriples $ACTIVITY_TYPES "http%3A%2F%2Fdata.digitalsocial.eu%2Fgraph%2Fconcept-scheme%2Factivity-type"
fetch_ntriples $ACTIVITY_TECHNOLOGY_METHODS "http%3A%2F%2Fdata.digitalsocial.eu%2Fgraph%2Fconcept-scheme%2Ftechnology-method"
fetch_ntriples $AREA_OF_SOCIETY "http%3A%2F%2Fdata.digitalsocial.eu%2Fgraph%2Fconcept-scheme%2Farea-of-society"
upload_to_s3 $MAIN_DATA_SET
upload_to_s3 $ACTIVITY_TYPES
upload_to_s3 $ACTIVITY_TECHNOLOGY_METHODS
upload_to_s3 $AREA_OF_SOCIETY
set_modified_date
remove_old_backups
echo "$DATESTR Backup Complete."
| Swirrl/digitalsocial | backup_data_graph.sh | Shell | mit | 3,022 |
#LocalFolderPathStr=${PWD}
#LocalFilePathStr=$LocalFolderPathStr'/.pypirc'
#HomeFilePathStr=$HOME'/.pypirc'
#echo $LocalFilePathStr
#echo $HomeFilePathStr
#cp $LocalFilePathStr $HomeFilePathStr
python setup.py register
sudo python setup.py sdist upload | Ledoux/ShareYourSystem | Pythonlogy/upload.sh | Shell | mit | 252 |
#!/bin/sh
set -e
NEEDS_CONFIG=0
if [ "$#" -lt "1" ]; then
echo "Bad usage: check_reconf.sh [target_package] (optional configure arguments)"
exit 1
fi
if [ "$ONYX_ARCH" = "" ]; then
echo "ONYX_ARCH needs to be set!"
exit 1
fi
if [ "$HOST" = "" ]; then
echo "HOST needs to be set!"
exit 1
fi
if [ "$SYSROOT" = "" ]; then
echo "SYSROOT needs to be set!"
exit 1
fi
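# Example invocation (hypothetical values):
# ONYX_ARCH=x86_64 HOST=x86_64-onyx SYSROOT=/path/to/sysroot ./check_reconf.sh musl --prefix=/usr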
TARGET_PKG=$1
cd $TARGET_PKG
if [ -f "CONF_STAMP" ]; then
if [ "$(cat CONF_STAMP)" != "ARCH=${ONYX_ARCH}" ]; then
NEEDS_CONFIG=1
fi
else
NEEDS_CONFIG=1
fi
#echo "Needs conf: ${NEEDS_CONFIG}"
# Shift the arguments by one so we discard the first argument
shift 1
if [ "$NEEDS_CONFIG" = 0 ]; then
exit 0
fi
# Try and make clean/make distclean because some makefiles are kind of buggy **cough cough musl**
if [ -f Makefile ]; then
make distclean || make clean || true
fi
./configure --host=$HOST --with-sysroot=$SYSROOT "$@"
echo "ARCH=${ONYX_ARCH}" > CONF_STAMP
| heatd/Onyx | scripts/check_reconf.sh | Shell | mit | 948 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-3345-1
#
# Security announcement date: 2015-08-29 00:00:00 UTC
# Script generation date: 2017-01-01 21:07:32 UTC
#
# Operating System: Debian 8 (Jessie)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - iceweasel:38.2.1esr-1~deb8u1
#
# Last versions recommended by security team:
# - iceweasel:38.8.0esr-1~deb8u1
#
# CVE List:
# - CVE-2015-4497
# - CVE-2015-4498
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade iceweasel=38.8.0esr-1~deb8u1 -y
| Cyberwatch/cbw-security-fixes | Debian_8_(Jessie)/x86_64/2015/DSA-3345-1.sh | Shell | mit | 652 |
"$CLOUD_REBUILD" CDump 32 dll release same | xylsxyls/xueyelingshuang | src/CDump/version_release.sh | Shell | mit | 42 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2166-1
#
# Security announcement date: 2014-04-14 00:00:00 UTC
# Script generation date: 2017-01-01 21:03:46 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - libsnmp15:5.4.3~dfsg-2.4ubuntu1.2
#
# Last versions recommended by security team:
# - libsnmp15:5.4.3~dfsg-2.4ubuntu1.3
#
# CVE List:
# - CVE-2012-6151
# - CVE-2014-2284
# - CVE-2014-2285
# - CVE-2014-2310
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade libsnmp15=5.4.3~dfsg-2.4ubuntu1.3 -y
| Cyberwatch/cbw-security-fixes | Ubuntu_12.04_LTS/x86_64/2014/USN-2166-1.sh | Shell | mit | 706 |
#! /bin/bash
BASHRC="$HOME/.bashrc"
echo "# java setting" >> $BASHRC
echo "export JAVA_HOME=\$(/usr/libexec/java_home)" >> $BASHRC
echo "" >> $BASHRC
| terracotta-ko/BashScripts | installScript/java_config.sh | Shell | mit | 152 |
#!/bin/bash
cd "$(dirname "$BASH_SOURCE")" || {
echo "Error getting script directory" >&2
exit 1
}
./hallo.command
sudo launchctl load -w /System/Library/LaunchDaemons/com.apple.metadata.mds.plist | kiuz/Osx_Terminal_shell_script_collection | enable_spotlight.command | Shell | mit | 204 |
#!/bin/sh
# Compile script to compile against Elysion library
# Works for Mac OS X and Linux
FPC_BIN=`which fpc`
BIN_FOLDER="../bin"
LIB_FOLDER="../lib"
RES_FOLDER="../resources"
SRC_FOLDER="../source"
# Info.plist constants
BUNDLE_REGION="English"
BUNDLE_ICON="logo.icns"
BUNDLE_IDENT="com.mycompanyname"
BUNDLE_SIGNATURE="????"
BUNDLE_VERSION="1.0"
SRC_MAIN="main.lpr"
BIN_MAIN=`basename ${SRC_MAIN} .lpr`
CONFIG_FILE="@config.cfg"
EXEC_NAME="myapp"
APP_NAME="My App Title"
cd ${SRC_FOLDER}
if [ -f /System/Library/Frameworks/Cocoa.framework/Cocoa ]
then
SDL_PATH=
SDL_MIXER_PATH=
SDL_TTF_PATH=
SDL_NET_PATH=
DEV_LINK_PPC=
DEV_LINK_INTEL32=
DEV_LINK_INTEL64=
MIN_PPC=
MIN_INTEL32=
MIN_INTEL64=
if [ -d /Library/Frameworks/SDL.framework ]
then
SDL_PATH="/Library/Frameworks/SDL.framework"
elif [ -d ~/Library/Frameworks/SDL.framework ]
then
SDL_PATH="~/Library/Frameworks/SDL.framework"
else
echo "SDL not detected. Please check: https://github.com/freezedev/elysion/wiki/Setting-up-our-development-environment"
exit 1
fi
if [ -d /Library/Frameworks/SDL_mixer.framework ]
then
SDL_MIXER_PATH="/Library/Frameworks/SDL_mixer.framework"
elif [ -d ~/Library/Frameworks/SDL_mixer.framework ]
then
SDL_MIXER_PATH="~/Library/Frameworks/SDL_mixer.framework"
fi
if [ -d /Library/Frameworks/SDL_ttf.framework ]
then
SDL_TTF_PATH="/Library/Frameworks/SDL_ttf.framework"
elif [ -d ~/Library/Frameworks/SDL_ttf.framework ]
then
SDL_TTF_PATH="~/Library/Frameworks/SDL_ttf.framework"
fi
if [ -d /Library/Frameworks/SDL_net.framework ]
then
SDL_NET_PATH="/Library/Frameworks/SDL_net.framework"
elif [ -d ~/Library/Frameworks/SDL_net.framework ]
then
SDL_NET_PATH="~/Library/Frameworks/SDL_net.framework"
fi
if [ -d /Developer/SDKs/MacOSX10.7.sdk ] || [ -d /Developer/SDKs/MacOSX10.6.sdk ] || [ -d /Developer/SDKs/MacOSX10.5.sdk ] || [ -d /Developer/SDKs/MacOSX10.4u.sdk ]
then
echo "At least one Mac OS X SDK found"
else
echo "XCode does not seem be installed. Please install XCode."
exit 1
fi
if [ -d "/Developer/SDKs/MacOSX10.7.sdk" ]
then
DEV_LINK_PPC=
DEV_LINK_INTEL32="/Developer/SDKs/MacOSX10.7.sdk"
DEV_LINK_INTEL64="/Developer/SDKs/MacOSX10.7.sdk"
MIN_INTEL32="10.7.0"
MIN_INTEL64="10.7.0"
fi
if [ -d "/Developer/SDKs/MacOSX10.6.sdk" ]
then
DEV_LINK_PPC=
DEV_LINK_INTEL32="/Developer/SDKs/MacOSX10.6.sdk"
DEV_LINK_INTEL64="/Developer/SDKs/MacOSX10.6.sdk"
MIN_INTEL32="10.6.0"
MIN_INTEL64="10.6.0"
fi
if [ -d "/Developer/SDKs/MacOSX10.5.sdk" ]
then
DEV_LINK_PPC="/Developer/SDKs/MacOSX10.5.sdk"
DEV_LINK_INTEL32="/Developer/SDKs/MacOSX10.5.sdk"
MIN_INTEL32="10.5.0"
fi
if [ -d "/Developer/SDKs/MacOSX10.4u.sdk" ]
then
DEV_LINK_PPC="/Developer/SDKs/MacOSX10.4u.sdk"
DEV_LINK_INTEL32="/Developer/SDKs/MacOSX10.4u.sdk"
MIN_PPC="10.4.0"
MIN_INTEL32="10.4.0"
fi
FPC_BIN=`which ppc386`
# Compiling Intel x86 binary
${FPC_BIN} ${CONFIG_FILE} -XR${DEV_LINK_INTEL32} -k"-L${LIB_FOLDER}/MacOSX -L/usr/X11R6/lib" ${SRC_MAIN}
mv "${BIN_FOLDER}/${BIN_MAIN}" "${BIN_FOLDER}/${BIN_MAIN}-intel_x86"
rm ${BIN_FOLDER}/*.o ${BIN_FOLDER}/*.ppu
FPC_BIN=`which ppcx64`
# Compiling Intel x64 binary
${FPC_BIN} ${CONFIG_FILE} -XR${DEV_LINK_INTEL64} -k"-L${LIB_FOLDER}/MacOSX -L/usr/X11R6/lib" ${SRC_MAIN}
mv "${BIN_FOLDER}/${BIN_MAIN}" "${BIN_FOLDER}/${BIN_MAIN}-intel_x64"
rm ${BIN_FOLDER}/*.o ${BIN_FOLDER}/*.ppu
FPC_BIN=`which ppcppc`
# Compiling PowerPC binary
${FPC_BIN} ${CONFIG_FILE} -XR${DEV_LINK_PPC} -k"-L${LIB_FOLDER}/MacOSX -L/usr/X11R6/lib" ${SRC_MAIN}
mv "${BIN_FOLDER}/${BIN_MAIN}" "${BIN_FOLDER}/${BIN_MAIN}-ppc"
rm ${BIN_FOLDER}/*.o ${BIN_FOLDER}/*.ppu
# Creating universal binary
# Strip executables
if [ -f "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" ]
then
strip "${BIN_FOLDER}/${BIN_MAIN}-intel_x86"
fi
if [ -f "${BIN_FOLDER}/${BIN_MAIN}-intel_x64" ]
then
strip "${BIN_FOLDER}/${BIN_MAIN}-intel_x64"
fi
if [ -f "${BIN_FOLDER}/${BIN_MAIN}-ppc" ]
then
strip "${BIN_FOLDER}/${BIN_MAIN}-ppc"
fi
# All three compilers are here... yeah, universal binary de luxe (Intel 32, Intel 64 + PowerPC 32)
if [ -f "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" ] && [ -f "${BIN_FOLDER}/${BIN_MAIN}-intel_x64" ] && [ -f "${BIN_FOLDER}/${BIN_MAIN}-ppc" ]
then
lipo -create "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" "${BIN_FOLDER}/${BIN_MAIN}-intel_x64" "${BIN_FOLDER}/${BIN_MAIN}-ppc" -output "${BIN_FOLDER}/${EXEC_NAME}"
rm -rf "${BIN_FOLDER}/${BIN_MAIN}-intel_x86"
rm -rf "${BIN_FOLDER}/${BIN_MAIN}-intel_x64"
rm -rf "${BIN_FOLDER}/${BIN_MAIN}-ppc"
# PowerPC 32 + Intel 32
elif [ -f "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" ] && [ -f "${BIN_FOLDER}/${BIN_MAIN}-ppc" ]
then
lipo -create "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" "${BIN_FOLDER}/${BIN_MAIN}-ppc" -output "${BIN_FOLDER}/${EXEC_NAME}"
rm -rf "${BIN_FOLDER}/${BIN_MAIN}-intel_x86"
rm -rf "${BIN_FOLDER}/${BIN_MAIN}-ppc"
# Intel 32 + Intel 64
elif [ -f "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" ] && [ -f "${BIN_FOLDER}/${BIN_MAIN}-intel_x64" ]
then
lipo -create "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" "${BIN_FOLDER}/${BIN_MAIN}-intel_x64" -output "${BIN_FOLDER}/${EXEC_NAME}"
rm -rf "${BIN_FOLDER}/${BIN_MAIN}-intel_x86"
rm -rf "${BIN_FOLDER}/${BIN_MAIN}-intel_x64"
else
strip "${BIN_FOLDER}/${BIN_MAIN}-intel_x86"
mv "${BIN_FOLDER}/${BIN_MAIN}-intel_x86" "${BIN_FOLDER}/${EXEC_NAME}"
fi
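# Optional sanity check: list the architectures baked into the result, e.g.
# lipo -info "${BIN_FOLDER}/${EXEC_NAME}"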
if [ -d "${BIN_FOLDER}/${APP_NAME}.app" ]
then
echo " ... Removing old application"
rm -rf "${BIN_FOLDER}/${APP_NAME}.app"
fi
echo " ... Creating Application Bundle"
mkdir "${BIN_FOLDER}/${APP_NAME}.app"
mkdir "${BIN_FOLDER}/${APP_NAME}.app/Contents"
mkdir "${BIN_FOLDER}/${APP_NAME}.app/Contents/MacOS"
mkdir "${BIN_FOLDER}/${APP_NAME}.app/Contents/Resources"
mkdir "${BIN_FOLDER}/${APP_NAME}.app/Contents/Frameworks"
cp -R "${RES_FOLDER}/" "${BIN_FOLDER}/${APP_NAME}.app/Contents/Resources/"
# Copy frameworks from System
cp -R "${SDL_PATH}" "${BIN_FOLDER}/${APP_NAME}.app/Contents/Frameworks/"
cp -R "${SDL_MIXER_PATH}" "${BIN_FOLDER}/${APP_NAME}.app/Contents/Frameworks/"
cp -R "${SDL_TTF_PATH}" "${BIN_FOLDER}/${APP_NAME}.app/Contents/Frameworks/"
cp -R "${SDL_NET_PATH}" "${BIN_FOLDER}/${APP_NAME}.app/Contents/Frameworks/"
mv "${BIN_FOLDER}/${EXEC_NAME}" "${BIN_FOLDER}/${APP_NAME}.app/Contents/MacOS/"
echo "<?xml version='1.0' encoding='UTF-8'?>\
<!DOCTYPE plist PUBLIC \"-//Apple Computer//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\
<plist version=\"1.0\">\
<dict>\
<key>CFBundleDevelopmentRegion</key>\
<string>${BUNDLE_REGION}</string>\
<key>CFBundleExecutable</key>\
<string>${EXEC_NAME}</string>\
<key>CFBundleIconFile</key>\
<string>${BUNDLE_ICON}</string>\
<key>CFBundleIdentifier</key>\
<string>${BUNDLE_IDENT}</string>\
<key>CFBundleInfoDictionaryVersion</key>\
<string>6.0</string>\
<key>CFBundleName</key>\
<string>${APP_NAME}</string>\
<key>CFBundlePackageType</key>\
<string>APPL</string>\
<key>CFBundleSignature</key>\
<string>${BUNDLE_SIGNATURE}</string>\
<key>CFBundleVersion</key>\
<string>${BUNDLE_VERSION}</string>\
<key>CSResourcesFileMapped</key>\
<true/>\
<key>LSMinimumSystemVersionByArchitecture</key>\
<dict>\
<key>x86_64</key>\
<string>${MIN_INTEL64}</string>\
<key>i386</key>\
<string>${MIN_INTEL32}</string>\
<key>ppc</key>\
<string>${MIN_PPC}</string>\
</dict>\
</dict>\
</plist>" >> "${BIN_FOLDER}/${APP_NAME}.app/Contents/Info.plist"
echo "APPL${BUNDLE_SIGNATURE}" >> "${BIN_FOLDER}/${APP_NAME}.app/Contents/PkgInfo"
else
${FPC_BIN} ${CONFIG_FILE} ${SRC_MAIN}
if [ -f "${BIN_FOLDER}/${BIN_MAIN}" ]
then
mv "${BIN_FOLDER}/${BIN_MAIN}" "${BIN_FOLDER}/${EXEC_NAME}"
fi
fi
| freezedev/survival-guide-for-pirates | scripts/build.sh | Shell | mit | 8,208 |
#!/bin/bash
# ./run pixel_start pixel_end pixel_step freq_start freq_end
# freq_step spatial_resolution Rt model path
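# Example invocation (hypothetical values):
# ./run 0 100 1 10 50 10 0.85 1.0 C7 /tmp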
COUNTERX=$1
while [ $COUNTERX -le $2 ]; do
if [ ! -d "/home/vdelaluz/ARTs/papers/cavity2D/data/SEL05/${COUNTERX}" ]; then
# Control will enter here if $DIRECTORY doesn't exist
mkdir /home/vdelaluz/ARTs/papers/cavity2D/data/SEL05/${COUNTERX}
fi
COUNTER=$4
while [ $COUNTER -le $5 ]; do
echo Computing $COUNTERX $COUNTER
mpiexec -n 1 ./pakal -model $9 -min 1e-40 -r $7 -xy ${COUNTERX} 0 -detail 1 -big 1 -nu ${COUNTER} -Rt $8 -h 7.353e5 -f -7.36e5 -v 10 > /home/vdelaluz/ARTs/papers/cavity2D/data/SEL05/${COUNTERX}/${COUNTER}GHz.dat
mv emission_${COUNTERX}_0.dat /home/vdelaluz/ARTs/papers/cavity2D/data/SEL05/${COUNTERX}/${COUNTER}GHz_emission_0_0.dat
mv profile_${COUNTERX}_0.dat /home/vdelaluz/ARTs/papers/cavity2D/data/SEL05/${COUNTERX}/${COUNTER}GHz_profile_0_0.dat
let COUNTER=COUNTER+$6
done
let COUNTERX=COUNTERX+$3
done
| itztli/pakal | scripts/run.sh | Shell | mit | 1,003 |
#!/bin/bash
# Exit immediately if any commands return non-zero
set -e
# Output the commands we run
set -x
# This is a modified version of the Cloud Foundry Blue/Green deployment guide:
# https://docs.pivotal.io/pivotalcf/devguide/deploy-apps/blue-green.html
test $URL
# Update the blue app
cf unmap-route citizenship-appointment-blue $URL
cf push citizenship-appointment-blue -b https://github.com/AusDTO/java-buildpack.git --no-hostname --no-manifest --no-route -p build/libs/citizenship-appointments-0.0.1.jar -i 1 -m 512M
cf map-route citizenship-appointment-blue $URL
# Update the green app
cf unmap-route citizenship-appointment-green $URL
cf push citizenship-appointment-green -b https://github.com/AusDTO/java-buildpack.git --no-hostname --no-manifest --no-route -p build/libs/citizenship-appointments-0.0.1.jar -i 1 -m 512M
cf map-route citizenship-appointment-green $URL
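# Optional sanity check (not part of the original guide): confirm both apps
# are mapped to the route
# cf routes | grep "$URL"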
| AusDTO/citizenship-appointment-server | bin/cideploy.sh | Shell | mit | 886 |
#!/bin/bash
set -e
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
channel() {
if [ -n "${TRAVIS}" ]; then
if [ "${TRAVIS_RUST_VERSION}" = "${CHANNEL}" ]; then
pwd
(set -x; cargo "$@")
fi
elif [ -n "${APPVEYOR}" ]; then
if [ "${APPVEYOR_RUST_CHANNEL}" = "${CHANNEL}" ]; then
pwd
(set -x; cargo "$@")
fi
else
pwd
(set -x; cargo "+${CHANNEL}" "$@")
fi
}
if [ -n "${CLIPPY}" ]; then
# cached installation will not work on a later nightly
if [ -n "${TRAVIS}" ] && ! cargo install clippy --debug --force; then
echo "COULD NOT COMPILE CLIPPY, IGNORING CLIPPY TESTS"
exit
fi
cargo clippy -- -Dclippy
else
CHANNEL=nightly
channel clean
channel build
(cd "$DIR/tests/deps" && channel build)
channel test
channel test --features preserve_order
for CHANNEL in stable 1.15.0 1.16.0 1.17.0 beta; do
channel clean
channel build
channel build --features preserve_order
done
fi
| tomasvdw/bitcrust | serde_json/travis.sh | Shell | mit | 1,072 |
#!/bin/bash
set -ex
mkdir /chroot
mkdir /chroot/bin
mkdir /chroot/lib
mkdir /chroot/lib64
mkdir /chroot/dev
mkdir /chroot/tmp
mkdir /chroot/var
# # debootstrap
# debootstrap saucy /chroot
# busybox
cp /bin/busybox /chroot/bin/sh
cp /lib64/ld-linux-x86-64.so.2 /chroot/lib64/ld-linux-x86-64.so.2
cp /lib/x86_64-linux-gnu/libc.so.6 /chroot/lib/libc.so.6
# legacy-bridge
cp /src/sandstorm-master/bin/legacy-bridge /chroot/
cp /usr/local/lib/libcapnp-rpc-0.5-dev.so /chroot/lib/libcapnp-rpc-0.5-dev.so
cp /usr/local/lib/libkj-async-0.5-dev.so /chroot/lib/libkj-async-0.5-dev.so
cp /usr/local/lib/libcapnp-0.5-dev.so /chroot/lib/libcapnp-0.5-dev.so
cp /usr/local/lib/libkj-0.5-dev.so /chroot/lib/libkj-0.5-dev.so
cp /usr/lib/x86_64-linux-gnu/libstdc++.so.6 /chroot/lib/libstdc++.so.6
cp /lib/x86_64-linux-gnu/libm.so.6 /chroot/lib/libm.so.6
cp /lib/x86_64-linux-gnu/libgcc_s.so.1 /chroot/lib/libgcc_s.so.1
# shell
go build -o /chroot/shell github.com/kevinwallace/sandstorm-shell/shell
cp /lib/x86_64-linux-gnu/libpthread.so.0 /chroot/lib/libpthread.so.0
# manifest
capnp eval -I /src/sandstorm-master/src -b /root/manifest.capnp manifest > /chroot/sandstorm-manifest
# package
spk pack /chroot /root/secret.key /output/shell.spk | kevinwallace/sandstorm-shell | make.sh | Shell | mit | 1,231 |
. ~/hulk-bash/scripts/web.sh
. ~/hulk-bash/.aliases
| BennyHallett/hulk-bash | hulk.bash | Shell | mit | 52 |
((n|=2<<1)) | grncdr/js-shell-parse | tests/fixtures/shellcheck-tests/arithmetic3/source.sh | Shell | mit | 11 |
#!/bin/bash
set -xv
## MACS0429
export ending=OCFSRI
export SUBARUDIR=/gpfs/slac/kipac/fs1/u/awright/SUBARU/
export cluster="MACS1115+01"
. ${cluster}.ini
export config="10_3"
export lens='pretty'
export ending="OCFI"
export filter="W-J-B"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a scratch/OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFSI"
export filter="W-C-IC"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a scratch/OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFSRI"
export filter="W-C-RC"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a scratch/OUT-coadd_${cluster}.${filter}_pretty.log
#/gpfs/slac/kipac/fs1/u/awright/SUBARU/MACS0429-02/W-J-V/SCIENCE/SUPA0043650_9OCFSI.sub.fits
export ending="OCFSI"
export filter="W-J-V"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a scratch/OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFSFI"
export filter="W-S-Z+"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a scratch/OUT-coadd_${cluster}.${filter}_pretty.log
exit 0;
export cluster=MACS0429-02
. ${cluster}.ini
export config="10_3"
export lens='pretty'
export ending="OCFI"
export filter="W-J-B"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
exit 0;
#./do_coadd_pretty.sh ${cluster} W-C-RC 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-C-RC_pretty.log
export ending="OCFSFI"
export filter="W-C-IC_2006-12-21_CALIB"
./do_coadd_pretty.sh ${cluster} ${filter} 'all 3s ' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFSFI"
export filter="W-C-IC"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFSI"
export filter="W-C-RC_2009-01-23_CALIB"
./do_coadd_pretty.sh ${cluster} ${filter} 'all 3s ' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
#/gpfs/slac/kipac/fs1/u/awright/SUBARU/MACS0429-02/W-C-RC/SCIENCE/SUPA0043342_9OCFSRI.sub.fits
export ending="OCFSRI"
export filter="W-C-RC"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFI"
export filter="W-J-B_2015-12-15_CALIB"
./do_coadd_pretty.sh ${cluster} ${filter} 'all 3s ' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
#/gpfs/slac/kipac/fs1/u/awright/SUBARU/MACS0429-02/W-J-V/SCIENCE/SUPA0043650_9OCFSI.sub.fits
export ending="OCFSI"
export filter="W-J-V_2009-01-23_CALIB"
./do_coadd_pretty.sh ${cluster} ${filter} 'all 3s' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFSI"
export filter="W-J-V"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
export ending="OCFSFI"
export filter="W-S-Z+_2015-12-15_CALIB"
./do_coadd_pretty.sh ${cluster} ${filter} 'all 3s' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
export filter="W-S-Z+"
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
exit 0;
## RXJ2129
. RXJ2129.ini
export ending=OCFSRI
export SUBARUDIR=/gpfs/slac/kipac/fs1/u/awright/SUBARU/
export cluster=RXJ2129
export filter=W-C-RC
export config="10_3"
export lens='pretty'
#./do_coadd_pretty.sh ${cluster} W-C-RC 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-C-RC_pretty.log
export ending=OCFSRI
./do_coadd_pretty.sh ${cluster} W-J-V 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.W-J-V_pretty.log
export ending=OCFSFI
./do_coadd_pretty.sh ${cluster} W-S-I+ 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.W-S-I+_pretty.log
export ending=OCFSI
./do_coadd_pretty.sh ${cluster} W-J-B 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.W-J-B_pretty.log
export ending=OCFSFI
./do_coadd_pretty.sh ${cluster} W-S-Z+ 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.W-S-Z+_pretty.log
exit 0;
export lens='pretty'
./do_coadd_pretty.sh ${cluster} W-C-RC 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-C-RC_pretty.log
. Zw2089.ini
export cluster=Zw2089
export ending="OCFI"
#./do_coadd_pretty.sh ${cluster} W-C-RC 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-C-RC_pretty.log
#./do_coadd_pretty.sh ${cluster} W-S-I+ 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-S-I+_pretty.log
export ending=OCFSFI
./do_coadd_pretty.sh ${cluster} W-S-Z+ 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-S-Z+_pretty.log
export ending="OCFI"
./do_coadd_pretty.sh ${cluster} W-J-B 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-J-B_pretty.log
#./do_coadd_pretty.sh ${cluster} W-J-V 'pretty' 'none' ${ending} 'yes' 'yes' 2>&1 | tee -a OUT-coadd_${cluster}.W-J-V_pretty.log
exit 0;
export cluster=Zw2089
export ending="OCFSIR"
filter=W-S-I+
#./adam_pre_coadd_cleanup.sh Zw2089 W-S-I+
./do_coadd_pretty.sh Zw2089 W-S-I+ "pretty" 'none' ${ending} 2>&1 | tee -a OUT-coadd_Zw2089.${filter}.log
export cluster=Zw2089
export ending=OCFSIR
export filter=W-J-V
rm OUT-coadd_${cluster}.${filter}.log
./adam_pre_coadd_cleanup.sh ${cluster} ${filter}
./do_coadd_batch.sh Zw2089 W-J-V "all good exposure gabodsid" 'Zw2089_good_coadd_conditions.txt' ${ending} 2>&1 | tee -a OUT-coadd_${cluster}.${filter}.log
#/gpfs/slac/kipac/fs1/u/awright/SUBARU/MACS0429-02/W-S-Z+/SCIENCE/SUPA0154653_9OCFSFI.sub.fits
export ending="OCFSFI"
export filter="W-S-Z+"
#/gpfs/slac/kipac/fs1/u/awright/SUBARU/MACS0429-02/W-S-Z+_2015-12-15_CALIB/SCIENCE/SUPA0154638_9OCFSFI.sub.fits
#/gpfs/slac/kipac/fs1/u/awright/SUBARU/MACS0429-02/W-S-Z+_2015-12-15_CALIB/SCIENCE/SUPA0154639_9OCFSFI.sub.fits
./do_coadd_pretty.sh ${cluster} ${filter} 'pretty' 'none' ${ending} 'no' 'no' 2>&1 | tee -a OUT-coadd_${cluster}.${filter}_pretty.log
| deapplegate/wtgpipeline | make_pretty_fgas_coadd.sh | Shell | mit | 6,379 |
#!/bin/bash
. 'functions.sh'
print "Building site............"
GIT=`which git`
# Clone repo and add symlink
if [ ! -d $ROOTFS/app/project/docroot ]
then
print "Downloading latest Drupal Core ..."
exec 'wget -O - http://ftp.drupal.org/files/projects/drupal-7.39.tar.gz | tar zxf -'
exec 'mv drupal-7.39 app/project/docroot'
exec 'ln -s project/docroot app/docroot'
print "Cloning git repo ..."
exec "$GIT clone [email protected]:kurobits/condo-profile.git app/project/docroot/profiles/condo"
DEFAULT_DIR="$ROOTFS/app/project/docroot/sites/default"
print "Adding config and files directory ..."
exec "mkdir -p $DEFAULT_DIR/files"
exec "chmod a+rw $DEFAULT_DIR/files"
exec "cp $DEFAULT_DIR/default.settings.php $DEFAULT_DIR/settings.php"
echo '' >> $DEFAULT_DIR/settings.php
echo '// read local settings' >> $DEFAULT_DIR/settings.php
echo 'if (file_exists(__DIR__ . "/local.settings.php")) {' >> $DEFAULT_DIR/settings.php
echo ' require(__DIR__ . "/local.settings.php");' >> $DEFAULT_DIR/settings.php
echo '}' >> $DEFAULT_DIR/settings.php
print "Copying local settings for site ..."
exec "cp $ROOTFS/local.settings.php $DEFAULT_DIR"
print ''
print '----------------------------------------------------------------------'
print 'Bring up the containers with ./d4d up, then visit:'
print 'http://localhost:8000/install.php'
fi
# Add local settings and files directory
| cesarmiquel/mu-docker-drupal | build-site.sh | Shell | mit | 1,464 |
#!/bin/bash
# data in Empar_paper/data/simul_balanc4GenNonhSSM
#length1000_b100.tar length1000_b150.tar length1000_b200.tar
#length1000_b100_num98.fa
MOD=ssm
ITER=2 # number of data sets
bl=100
#prep output files
OUT_lik='likel_balanced4_gennonh_'$bl'_'$MOD'_E.txt'
OUT_iter='iter_balanced4_gennonh_'$bl'_'$MOD'_E.txt'
OUT_time='time_balanced4_gennonh_'$bl'_'$MOD'_E.txt'
OUT_nc='neg_cases_balanced4_gennonh_'$bl'_'$MOD'_E.txt'
[[ -f $OUT_lik ]] && rm -f $OUT_lik
[[ -f $OUT_iter ]] && rm -f $OUT_iter
[[ -f $OUT_time ]] && rm -f $OUT_time
[[ -f $OUT_nc ]] && rm -f $OUT_nc
touch $OUT_lik
touch $OUT_iter
touch $OUT_time
touch $OUT_nc
# run from within the scripts folder
for i in $(seq 0 1 $ITER)
do
#extract a single file from tar
tar -xvf ../data/simul_balanc4GenNonhSSM/length1000_b$bl.tar length1000_b$bl\_num$i.fa
./main ../data/trees/treeE.tree length1000_b$bl\_num$i.fa $MOD > out.txt
cat out.txt | grep Likelihood | cut -d':' -f2 | xargs >> $OUT_lik
cat out.txt | grep Iter | cut -d':' -f2 | xargs >> $OUT_iter
cat out.txt | grep Time | cut -d':' -f2 | xargs >> $OUT_time
cat out.txt | grep "negative branches" | cut -d':' -f2 | xargs >> $OUT_nc
rm out.txt
# not polluting the folder with single files
rm length1000_b$bl\_num$i.fa
done
mv $OUT_time ../results/ssm/gennonh_data/balanc4GenNonh/.
mv $OUT_lik ../results/ssm/gennonh_data/balanc4GenNonh/.
mv $OUT_iter ../results/ssm/gennonh_data/balanc4GenNonh/.
mv $OUT_nc ../results/ssm/gennonh_data/balanc4GenNonh/.
| Algebraicphylogenetics/Empar_paper | scripts/process_balanced4_gennonh_ssm.sh | Shell | mit | 1,509 |
LIBEVENT_VERSION="2.1.11-stable"
LIBEVENT_SHA256SUM="a65bac6202ea8c5609fd5c7e480e6d25de467ea1917c08290c521752f147283d"
rm -fR libevent*
getpkg https://github.com/libevent/libevent/releases/download/release-${LIBEVENT_VERSION}/libevent-${LIBEVENT_VERSION}.tar.gz $LIBEVENT_SHA256SUM
tar zxvf libevent-${LIBEVENT_VERSION}.tar.gz
cd libevent-${LIBEVENT_VERSION}
./configure --prefix=$VENV
$PMAKE
make install
| mattbillenstein/ve | pkgs/available/libevent.sh | Shell | mit | 409 |
#!/bin/bash
# this causes the script to exit if any line causes an error. if there are badly-behaved bits of script that you want to ignore, you can run "set +e" and then "set -e" again afterwards.
set -e
# setting the variable stylefile to be the string on the RHS of =. you can't have spaces around the =, annoyingly.
# strings are either with double-quotes "" or single quotes ''. the difference is that the double quotes will substitute variables, e.g: if stylefile="x" then "foo_${stylefile}" is "foo_x", but 'foo_${stylefile}' is just 'foo_${stylefile}'
stylefile="targeted-editing/scripts/default.style"
# what i'm trying to do here is make it so that we can run the script as if we typed: import_db.sh database input query1 query2 ... queryN
# and the variables in the script get set as: dbname="database" inputfile="input" and $*="query1 query2 ... queryN"
# $1, $2, etc... are the first, second ... arguments to the script
dbname=$1 # array[1]
inputfile=$2 # array[2]
# shift offsets the arguments, so that after running "shift 2", what used to be $3 is now $1, what used to be $4 is now $2, and so forth
shift 2
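# e.g. if invoked as: import_db.sh new_york ny.osm.pbf a.sql b.sql
# before "shift 2": $1=new_york $2=ny.osm.pbf $3=a.sql $4=b.sql
# after "shift 2": $1=a.sql $2=b.sql, and "$*" expands to "a.sql b.sql"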
# these are totally equivalent:
# dropdb --if-exists $dbname;
# dropdb --if-exists "$dbname";
# dropdb --if-exists ${dbname};
# dropdb --if-exists "${dbname}";
dropdb --if-exists $dbname;
# replace "user" below with your user name
createdb -E UTF-8 -O user $dbname;
psql -c "create extension postgis; create extension hstore; create extension btree_gist" $dbname;
# replace "user" below with your user name and adjust the amount of RAM you wish to allocate up or down from 12000(MB)
osm2pgsql -S $stylefile -d $dbname -C 12000 -s -G -x -k -K -U user -H /tmp $inputfile;
# for (var i = 0; i < array.length; i++) {
# var query = array[i];
for query in $*; do
echo "QUERY $query against database $dbname";
# `` is like a subselect, everything between the `` characters gets executed and replaced by whatever they output
# basename is a command which returns the file part of the filename, rather than the full path. so we can write "$(basename /very/long/path/with/lots/of/slashes.txt)" and it returns "slashes.txt"
query_base=`echo "$(basename $query)" | sed 's/\.sql//'`;
# execute the query and put its results ('>') in the file called "${dbname}_${query_base}.txt", so for a database called "new_york" and a query file called "fitness.sql", the output file would be "new_york_fitness.txt"
psql -f $query --quiet -t --no-align -F , $dbname | sed "s/^/${dbname},${query_base},/" > ${dbname}_${query_base}.txt;
done
| mapzen-data/targeted-editing | scripts/import_db.sh | Shell | mit | 2,564 |
#!/bin/sh
# It seems there are problems with tab replacement;
# this is the Mac version.
sed 's/ / \& /g' ../temp/mpfr.d.column > ../temp/mpfr.d.amp
sed 's/ / \& /g' ../temp/mpfr.h.column > ../temp/mpfr.h.amp
sed 's/ / \& /g' ../temp/jur.d.column > ../temp/jur.d.amp
sed 's/ / \& /g' ../temp/jur.h.column > ../temp/jur.h.amp
sed 's/ / \& /g' ../temp/medical.d.column > ../temp/medical.d.amp
sed 's/ / \& /g' ../temp/medical.h.column > ../temp/medical.h.amp
sed 's/ / \& /g' ../temp/multi.d.column > ../temp/multi.d.amp
sed 's/ / \& /g' ../temp/multi.h.column > ../temp/multi.h.amp
sed 's/ / \& /g' ../temp/short_1d21.d.column > ../temp/short_1d21.d.amp
sed 's/ / \& /g' ../temp/short_1d21.h.column > ../temp/short_1d21.h.amp
| michal-fre/refugee-phrasebook.github.io | bash-scripts-for-pdf-generation/scripts/06_replace_tabulator_with_ampersand_MAC.sh | Shell | mit | 736 |
for f in $@
do
echo $f
out=`basename $f | sed 's/\.fa$//'`
echo blastp -outfmt '6 qseqid sseqid evalue' -num_threads 16 -query $f -db uniprot/uniprot_all.fasta -out "blast/$out.blast"
done
| Dill-PICL/maize-GAMER | cafa_sw/argot2/run_blast.sh | Shell | mit | 192 |
#!/bin/sh
usage() {
cat <<EOF
USAGE: \${DOTFILES}/shell/csh/launcher.sh <function> [arg ...]
FUNCTION:
exit <return_code>
EOF
}
# Print usage if not specify the function that need to run.
if [ $# -eq 0 ]; then
usage
exit 1
fi
# The function name is the first argument.
func_name=${1}
# Remove the first argument "function name".
shift 1
# Dispatch to the corresponding function.
case ${func_name} in
exit)
exit ${@}
;;
*)
cat <<EOF
Error: Unknown function "${func_name}";
this shell script currently only supports the "exit" function.
EOF
usage
exit 1
;;
esac
| Johnson9009/dotfiles | shell/csh/launcher.sh | Shell | mit | 642 |
#!/bin/bash -ex
cd "$(dirname "$0")"
docker run --rm \
-v "$PWD/.:/work" \
-w "/work" \
ruby:2.5 bash -ec "
gem install -N parse_a_changelog
parse ./CHANGELOG.md
"
| conjurinc/summon-chefapi | parse-changelog.sh | Shell | mit | 183 |
#!/usr/bin/env bash
PIDFILE="$HOME/.brianbondy_nodejs.pid"
if [ -e "${PIDFILE}" ] && (ps -u $USER -f | grep "[ ]$(cat ${PIDFILE})[ ]"); then
echo "Already running."
exit 99
fi
PATH=/home/tweetpig/webapps/brianbondy_node/bin:$PATH LD_LIBRARY_PATH=/home/tweetpig/lib/libgif NODE_ENV=production PORT=32757 /home/tweetpig/webapps/brianbondy_node/bin/node --harmony --max-old-space-size=200 /home/tweetpig/webapps/brianbondy_node/dist/server.js --brianbondy_node > $HOME/.brianbondy_nodejs.log &
echo $! > "${PIDFILE}"
chmod 644 "${PIDFILE}"
| bbondy/brianbondy.node | webfaction/watchdog.sh | Shell | mit | 545 |
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$CONFIGURATION_BUILD_DIR/WTCalendarController/WTCalendarController.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$CONFIGURATION_BUILD_DIR/WTCalendarController/WTCalendarController.framework"
fi
| sbishopwta/WTCalendarController | Example/Pods/Target Support Files/Pods-WTCalendarController_Example/Pods-WTCalendarController_Example-frameworks.sh | Shell | mit | 3,624 |
#!/bin/sh
_now=$(date +"%m_%d_%Y_%H_%M_%S")
PATH=$PATH:/usr/local/bin
export PATH
cd $HOME/Code/newspost/crawlers/crawlers && $HOME/Installs/envs/newspost/bin/scrapy runspider spiders/huffingtonpost.py -o feeds/huffingtonpost-$_now.json
wait
cd $HOME/Code/newspost/crawlers/crawlers && $HOME/Installs/envs/newspost/bin/scrapy runspider spiders/iamwire.py -o feeds/iamwire-$_now.json
wait
cd $HOME/Code/newspost/crawlers/crawlers && $HOME/Installs/envs/newspost/bin/scrapy runspider spiders/vccircle.py -o feeds/vccircle-$_now.json
wait
cd $HOME/Code/newspost/crawlers/crawlers && $HOME/Installs/envs/newspost/bin/scrapy runspider spiders/moneycontrol.py -o feeds/moneycontrol-$_now.json | Newsrecommender/newsrecommender | ArticleRecommendationProject/Crawlers/crawlers/scripts/scrape_list_3.sh | Shell | mit | 689 |
#!/bin/sh
tokenizer()
{
STRNG="${1}"
DELIM="${2}"
while :
do
NEW="${STRNG%${DELIM}}"
while case "$NEW" in
*${DELIM}*);;
*)break;;
esac
do NEW="${NEW%${DELIM}*}"
done
TOKEN="${NEW%${DELIM}*}"
STRNG="${STRNG#${TOKEN}${DELIM}}"
printf "%s\n" "$TOKEN"
case "$STRNG" in
*${DELIM}*) ;;
*) [ -n "$d" ] && break || d="1" ;;
esac
done
}
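# Usage sketch: tokenizer splits on the delimiter and prints one token per line,
# e.g. `tokenizer "/usr/bin:/bin" ":"` prints "/usr/bin" then "/bin".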
which(){
for i in $(tokenizer $PATH ":" )
do
#echo "${i}" # test directory walk
[ -d "${i}/${1}" ] && break
[ -x "${i}/${1}" ] && echo "${i}/${1}" && exit
done
}
which $@
| csitd/shell-utils | which.sh | Shell | mit | 732 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2950-5
#
# Security announcement date: 2016-05-25 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:27 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - samba:2:4.3.9+dfsg-0ubuntu0.14.04.3
#
# Last versions recommended by security team:
# - samba:2:4.3.11+dfsg-0ubuntu0.14.04.4
#
# CVE List:
# - CVE-2015-5370
# - CVE-2016-2110
# - CVE-2016-2111
# - CVE-2016-2112
# - CVE-2016-2113
# - CVE-2016-2114
# - CVE-2016-2115
# - CVE-2016-2118
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade samba=2:4.3.11+dfsg-0ubuntu0.14.04.4 -y
| Cyberwatch/cbw-security-fixes | Ubuntu_14.04_LTS/x86_64/2016/USN-2950-5.sh | Shell | mit | 794 |
#!/usr/bin/env bash
# Run a raspberry pi as ulnoiot gateway (wifi router and mqtt_broker)
#
# To enable this,
# make sure ulnoiot-run script is porperly setup (for example in /home/pi/bin)
# add the following to the end of /etc/rc.local with adjusted location of the
# run-script:
# export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
# /home/pi/bin/ulnoiot exec /home/pi/ulnoiot/lib/system_boot/raspi-boot.sh"
#
# Also disable all network devices in /etc/network/interfaces apart lo and wlan1
# and make sure that wlan1 configuration looks like this (replace /home/pi/ulnoiot
# with the respective ULNOIOT_ROOT):
# allow-hotplug wlan1
# iface wlan1 inet manual
# pre-up /home/pi/bin/ulnoiot exec /home/pi/ulnoiot/lib/system_boot/raspi-pre-up.sh
# wpa-conf /run/uiot_wpa_supplicant.conf
[ "$ULNOIOT_ACTIVE" = "yes" ] || { echo "ulnoiot not active, aborting." 1>&2;exit 1; }
source "$ULNOIOT_ROOT/bin/read_boot_config"
# Try to guess user
if [[ $ULNOIOT_ROOT =~ /home/([^/]+)/ulnoiot ]]; then
ULNOIOT_USER=${BASH_REMATCH[1]}
else
ULNOIOT_USER=ulnoiot
fi
if [[ "ULNOIOT_AP_PASSWORD" ]]; then # pw was given, so start an accesspoint
# start accesspoint and mqtt_broker
(
sleep 15 # let network devices start
cd "$ULNOIOT_ROOT"
tmux new-session -d -n AP -s UIoTSvrs \
"./run" exec accesspoint \; \
new-window -d -n MQTT \
"./run" exec mqtt_broker \; \
new-window -d -n nodered \
su - $ULNOIOT_USER -c 'ulnoiot exec nodered_starter' \; \
new-window -d -n cloudcmd \
su - $ULNOIOT_USER -c 'ulnoiot exec cloudcmd_starter' \; \
new-window -d -n dongle \
su - $ULNOIOT_USER -c 'ulnoiot exec dongle_starter' \;
) &
fi # accesspoint check
| ulno/ulnoiot | lib/system_boot/raspi-boot.sh | Shell | mit | 1,840 |
#!/usr/bin/env bash
PYTHONPATH=. DJANGO_SETTINGS_MODULE=sampleproject.settings py.test --create-db
| RealGeeks/django-cache-purge-hooks | runtests.sh | Shell | mit | 99 |
docker run -d \
--name=sickbeard \
-v $(pwd)/data:/data \
-v $(pwd)/config/config.ini:/app/config.ini \
-p 8081:8081 \
chamunks/alpine-sickbeard-arm:latest
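# Follow the container logs after startup (optional):
# docker logs -f sickbeard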
| chamunks/alpine-sickbeard-arm | run.sh | Shell | mit | 171 |
#!/bin/bash
#
# bash strict mode
set -euo pipefail
IFS=$'\n\t'
USAGE="Usage:\n
Requires AWS CLI tools and credentials configured.\n
./tool.sh install mySourceDirectory\n
./tool.sh create mySourceDirectory myAWSLambdaFunctionName myIAMRoleARN\n
./tool.sh update mySourceDirectory myAWSLambdaFunctionName\n
./tool.sh invoke myAWSLambdaFunctionName\n
"
REGION="eu-west-1"
PROFILE="heap"
# Install pip requirements for a Python lambda
function install_requirements {
FUNCTION_DIRECTORY=$2
cd $FUNCTION_DIRECTORY
pip install -r requirements.txt -t .
}
# Creates a new lambda function
function create {
FUNCTION_DIRECTORY=$2
FUNCTION_ARN_NAME=$3
ROLE_ARN=$4
mkdir -p build
cd $FUNCTION_DIRECTORY
zip -FSr ../build/$FUNCTION_DIRECTORY.zip .
cd ..
aws lambda create-function\
--function-name $FUNCTION_ARN_NAME\
--runtime python2.7\
--role $ROLE_ARN\
--handler main.lambda_handler\
--timeout 15\
--memory-size 128\
--zip-file fileb://build/$FUNCTION_DIRECTORY.zip
}
# Packages and uploads the source code of a AWS Lambda function and deploys it live.
function upload_lambda_source {
FUNCTION_DIRECTORY=$2
FUNCTION_ARN_NAME=$3
mkdir -p build
cd $FUNCTION_DIRECTORY
zip -FSr ../build/$FUNCTION_DIRECTORY.zip .
cd ..
aws lambda update-function-code --profile $PROFILE --region $REGION --function-name $FUNCTION_ARN_NAME --zip-file fileb://build/$FUNCTION_DIRECTORY.zip
}
# Invokes an AWS Lambda function and outputs its result
function invoke {
FUNCTION_ARN_NAME=$2
aws lambda invoke --profile $PROFILE --region $REGION --function-name $FUNCTION_ARN_NAME /dev/stdout
}
function help_and_exit {
echo -e $USAGE
exit 1
}
# Subcommand handling
if [ $# -lt 1 ]
then
help_and_exit
fi
case "$1" in
install)
if (( $# == 2 )); then
install_requirements "$@"
else
help_and_exit
fi
;;
create)
if (( $# == 4 )); then
create "$@"
else
help_and_exit
fi
;;
update)
if (( $# == 3 )); then
upload_lambda_source "$@"
else
help_and_exit
fi
;;
invoke)
if (( $# == 2 )); then
invoke "$@"
else
help_and_exit
fi
;;
*)
echo "Error: No such subcommand"
help_and_exit
esac
| Vilsepi/after | backend/tool.sh | Shell | mit | 2,234 |
python -m unittest
| nrdhm/max_dump | test.sh | Shell | mit | 20 |
#!/bin/sh
echo "Stopping web-server ..."
COUNT_PROCESS=1
while [ $COUNT_PROCESS -gt 0 ]
do
COUNT_PROCESS=`ps -Aef | grep node | grep -c server.js`
if [ $COUNT_PROCESS -gt 0 ]; then
PID_PROCESS=`ps -Aef | grep node | grep server.js | awk '{print $2}'`
if [ ! -z "$PID_PROCESS" ]; then
echo "Killing web server PID=$PID_PROCESS"
kill "$PID_PROCESS"
fi
fi
echo "Waiting on web-server to stop ..."
sleep 1
done
echo "This web-server is stopped"
exit 0 | stefanreichhart/tribe | src/scripts/stopServer.sh | Shell | mit | 468 |
#! /bin/bash
defaults write com.apple.finder AppleShowAllFiles FALSE
killall Finder
| worp1900/niftySnippets | console/bash/mac/hideHiddenFilesInFinder.sh | Shell | mit | 84 |
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/SWXMLHash-watchOS/SWXMLHash.framework"
install_framework "$BUILT_PRODUCTS_DIR/SwiftBus-watchOS/SwiftBus.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/SWXMLHash-watchOS/SWXMLHash.framework"
install_framework "$BUILT_PRODUCTS_DIR/SwiftBus-watchOS/SwiftBus.framework"
fi
| victorwon/SwiftBus | Example/Pods/Target Support Files/Pods-SwiftBus Watch Example Extension/Pods-SwiftBus Watch Example Extension-frameworks.sh | Shell | mit | 3,785 |
#!/usr/bin/env bash
# Copyright (c) 2016 Ericsson AB
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# $1 = a file or a directory
NAME=$(basename $0)
function _sed() {
sed -i 's/#\s*include\s\(\"\|<\).*\/\(.*\)\(\"\|>\).*/#include \"\2\"/g' $@
}
if [ -f $1 ]; then
_sed $1
elif [ -d $1 ]; then
# Enter directory
cd $1
# Save all files in one array
files=($(find -type f))
number_of_files=${#files[*]}
[ $number_of_files -eq 0 ] && exit
_sed ${files[*]}
fi
| PatrikAAberg/dmce | dmce-remove-relpaths.sh | Shell | mit | 1,477 |
#!/bin/bash
SCRIPT_PATH="${BASH_SOURCE[0]}";
if ([ -h "${SCRIPT_PATH}" ]) then
while([ -h "${SCRIPT_PATH}" ]) do SCRIPT_PATH=`readlink "${SCRIPT_PATH}"`; done
fi
pushd . > /dev/null
cd `dirname ${SCRIPT_PATH}` > /dev/null
SCRIPT_PATH=`pwd`;
popd > /dev/null
if ! [ -f $SCRIPT_PATH/.nuget/nuget.exe ]
then
wget "https://www.nuget.org/nuget.exe" -P $SCRIPT_PATH/.nuget/
fi
mono $SCRIPT_PATH/.nuget/nuget.exe update -self
SCRIPT_PATH="${BASH_SOURCE[0]}";
if ([ -h "${SCRIPT_PATH}" ]) then
while([ -h "${SCRIPT_PATH}" ]) do SCRIPT_PATH=`readlink "${SCRIPT_PATH}"`; done
fi
pushd . > /dev/null
cd `dirname ${SCRIPT_PATH}` > /dev/null
SCRIPT_PATH=`pwd`;
popd > /dev/null
mono $SCRIPT_PATH/.nuget/NuGet.exe update -self
mono $SCRIPT_PATH/.nuget/NuGet.exe install FAKE -OutputDirectory $SCRIPT_PATH/packages -ExcludeVersion -Version 4.16.1
mono $SCRIPT_PATH/.nuget/NuGet.exe install xunit.runner.console -OutputDirectory $SCRIPT_PATH/packages/FAKE -ExcludeVersion -Version 2.0.0
mono $SCRIPT_PATH/.nuget/NuGet.exe install NUnit.Console -OutputDirectory $SCRIPT_PATH/packages/FAKE -ExcludeVersion -Version 3.2.1
mono $SCRIPT_PATH/.nuget/NuGet.exe install NBench.Runner -OutputDirectory $SCRIPT_PATH/packages -ExcludeVersion -Version 0.3.1
if ! [ -e $SCRIPT_PATH/packages/SourceLink.Fake/tools/SourceLink.fsx ] ; then
mono $SCRIPT_PATH/.nuget/NuGet.exe install SourceLink.Fake -OutputDirectory $SCRIPT_PATH/packages -ExcludeVersion
fi
export encoding=utf-8
mono $SCRIPT_PATH/packages/FAKE/tools/FAKE.exe build.fsx "$@"
| Horusiath/Hyperion | build.sh | Shell | mit | 1,543 |
# Erase duplicate entries from history
export HISTCONTROL="erasedups"
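# Example effect: re-running a command keeps a single history entry at the
# end; erasedups also drops older duplicates elsewhere in the history list.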
# Increase history size
export HISTSIZE="10000" | ridobe/dotfiles | bash/environment.sh | Shell | mit | 118 |
#!/bin/bash
QBIN=$(which qdyn5_r8)
OK="(\033[0;32m OK \033[0m)"
FAILED="(\033[0;31m FAILED \033[0m)"
steps=( $(ls -1v *inp | sed 's/.inp//') )
for step in ${steps[@]}
do
echo "Running step ${step}"
if ${QBIN} ${step}.inp > ${step}.log
then echo -e "$OK"
cp ${step}.re ${step}.re.rest
else
echo -e "$FAILED"
echo "Check output (${step}.log) for more info."
exit 1
fi
done
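# Example (hypothetical inputs): with eq1.inp, eq2.inp and fep1.inp present,
# the loop runs them in that order and keeps a <step>.re.rest restart copy
# after each successful step.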
| mpurg/qtools | docs/tutorials/seminar_2017_03_08/data/2-fep/run_q_local.sh | Shell | mit | 396 |
#!/bin/bash
f="$1"
d="$2"
CURRENT_DIR=$( pushd $(dirname $0) >/dev/null; pwd; popd >/dev/null )
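# Example (hypothetical paths): './check_file.sh song.mp3 /mnt/backup'
# removes ./song.mp3 only if /mnt/backup/song.mp3 exists with the same
# size and md5sum.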
if [ ! -d $d ]; then
echo "$d is not found"
exit 2
fi
F="$d/$f"
if [ -f "$F" ]; then
s1=`wc -c "$f" | cut -d ' ' -f 1`
s2=`wc -c "$F" | cut -d ' ' -f 1`
if [ $s1 -eq $s2 ]; then
cksum1=`md5sum -b "$f" | cut -d ' ' -f 1`
cksum2=`md5sum -b "$F" | cut -d ' ' -f 1`
if [ "$cksum1" == "$cksum2" ]; then
rm -v "$f"
else
echo "\"$F\" exists, has the same size, but different md5sum than \"$f\""
fi
else
echo "\"$F\" exists and has differrent size than \"$f\""
echo "\"$F\" - $s2"
echo "\"$f\" - $s1"
fi
else
echo "\"$F\" does not exist"
fi
| pavel-voinov/ghost-tools | check_file.sh | Shell | mit | 693 |
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-3663-1
#
# Security announcement date: 2016-09-09 00:00:00 UTC
# Script generation date: 2017-01-01 21:08:13 UTC
#
# Operating System: Debian 8 (Jessie)
# Architecture: i386
#
# Vulnerable packages fix on version:
# - xen:4.4.1-9+deb8u7
# - libxen-4.4:4.4.1-9+deb8u7
# - libxenstore3.0:4.4.1-9+deb8u7
# - libxen-dev:4.4.1-9+deb8u7
# - xenstore-utils:4.4.1-9+deb8u7
# - xen-utils-common:4.4.1-9+deb8u7
# - xen-utils-4.4:4.4.1-9+deb8u7
# - xen-hypervisor-4.4-amd64:4.4.1-9+deb8u7
# - xen-system-amd64:4.4.1-9+deb8u7
#
# Last versions recommended by security team:
# - xen:4.4.1-9+deb8u7
# - libxen-4.4:4.4.1-9+deb8u7
# - libxenstore3.0:4.4.1-9+deb8u7
# - libxen-dev:4.4.1-9+deb8u7
# - xenstore-utils:4.4.1-9+deb8u7
# - xen-utils-common:4.4.1-9+deb8u7
# - xen-utils-4.4:4.4.1-9+deb8u7
# - xen-hypervisor-4.4-amd64:4.4.1-9+deb8u7
# - xen-system-amd64:4.4.1-9+deb8u7
#
# CVE List:
# - CVE-2016-7092
# - CVE-2016-7094
# - CVE-2016-7154
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
sudo apt-get install --only-upgrade xen=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade libxen-4.4=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade libxenstore3.0=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade libxen-dev=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade xenstore-utils=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade xen-utils-common=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade xen-utils-4.4=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade xen-hypervisor-4.4-amd64=4.4.1-9+deb8u7 -y
sudo apt-get install --only-upgrade xen-system-amd64=4.4.1-9+deb8u7 -y
| Cyberwatch/cbw-security-fixes | Debian_8_(Jessie)/i386/2016/DSA-3663-1.sh | Shell | mit | 1,783 |
#!/usr/bin/env bash
mkdir -p target/sandboxjava9jlink;
/usr/lib/jvm/java-9-oracle/bin/javac \
--module-path ./../java9module/target/sandboxjava9module \
-d target/sandboxjava9jlink \
$(find ./src/main/java -name "*.java")
| banadiga/sandbox | java9jlink/javac.sh | Shell | mit | 237 |
python tile_grab2.py -b "35.247;32.130;42.786;37.676" -z 9 -i false -d "syria_satellite" -u "https://api.mapbox.com/styles/v1/mapbox/satellite-streets-v9/tiles/{z}/{x}/{y}?access_token=pk.eyJ1Ijoic3BhdGlhbG5ldHdvcmtzIiwiYSI6ImNpcW83Mm1kYjAxZ3hmbm5ub2llYnNuMmkifQ.an57h9ykokxNlGArcWQztw" -f jpg
python tile_grab2.py -b "35.247;32.130;42.786;37.676" -z 10 -i false -d "syria_satellite" -u "https://api.mapbox.com/styles/v1/mapbox/satellite-streets-v9/tiles/{z}/{x}/{y}?access_token=pk.eyJ1Ijoic3BhdGlhbG5ldHdvcmtzIiwiYSI6ImNpcW83Mm1kYjAxZ3hmbm5ub2llYnNuMmkifQ.an57h9ykokxNlGArcWQztw" -f jpg
python tile_grab2.py -b "35.247;32.130;42.786;37.676" -z 11 -i false -d "syria_satellite" -u "https://api.mapbox.com/styles/v1/mapbox/satellite-streets-v9/tiles/{z}/{x}/{y}?access_token=pk.eyJ1Ijoic3BhdGlhbG5ldHdvcmtzIiwiYSI6ImNpcW83Mm1kYjAxZ3hmbm5ub2llYnNuMmkifQ.an57h9ykokxNlGArcWQztw" -f jpg
python tile_grab2.py -b "35.247;32.130;42.786;37.676" -z 12 -i false -d "syria_satellite" -u "https://api.mapbox.com/styles/v1/mapbox/satellite-streets-v9/tiles/{z}/{x}/{y}?access_token=pk.eyJ1Ijoic3BhdGlhbG5ldHdvcmtzIiwiYSI6ImNpcW83Mm1kYjAxZ3hmbm5ub2llYnNuMmkifQ.an57h9ykokxNlGArcWQztw" -f jpg
python tile_grab2.py -b "35.247;32.130;42.786;37.676" -z 13 -i false -d "syria_satellite" -u "https://api.mapbox.com/styles/v1/mapbox/satellite-streets-v9/tiles/{z}/{x}/{y}?access_token=pk.eyJ1Ijoic3BhdGlhbG5ldHdvcmtzIiwiYSI6ImNpcW83Mm1kYjAxZ3hmbm5ub2llYnNuMmkifQ.an57h9ykokxNlGArcWQztw" -f jpg
python tile_grab2.py -b "35.247;32.130;42.786;37.676" -z 14 -i false -d "syria_satellite" -u "https://api.mapbox.com/styles/v1/mapbox/satellite-streets-v9/tiles/{z}/{x}/{y}?access_token=pk.eyJ1Ijoic3BhdGlhbG5ldHdvcmtzIiwiYSI6ImNpcW83Mm1kYjAxZ3hmbm5ub2llYnNuMmkifQ.an57h9ykokxNlGArcWQztw" -f jpg
| geobabbler/tile-grab | scrape2.sh | Shell | mit | 1,769 |
#!/bin/mksh
# (c) alexh 2016
set -eu
printf "%s\n\n" 'Content-type: text/plain'
# Optional with use of 'check_interval'
WANTED_INTERVAL='10'
USER="$( /usr/bin/whoami )"
HOMES_DIR='/home'
WWW_DIR="/var/www/virtual/${USER}"
HOME="${HOMES_DIR}/${USER}"
VAR_DIR="${HOME}/var/git-publish"
SRC_DIR="${HOME}/git"
function identify_service {
case "${HTTP_USER_AGENT}" in
send_post_manual)
printf "%s\n" 'Service identified as send_post_manual. Hi!'
. "${VAR_DIR}"/read_post_manual
;;
GitHub-Hookshot/*)
printf "%s\n" 'Service identified as GitHub.'
. "${VAR_DIR}"/read_post_github
;;
*)
printf "%s\n" "I don't know service ${HTTP_USER_AGENT}."
exit 73
;;
esac
}
POST="$(cat)"
if [ -z "${POST}" ]; then
printf "%s\n" 'POST empty'
exit 70
fi
function check_signature {
get_sig
if [ "${SIGNATURE}" == "${POST_SIG}" ]; then
printf "%s\n" 'POST body: Good signature'
else
printf "%s\n" 'POST body: Wrong signature'
exit 79
fi
}
function id_values {
ID_VALUES="$( /bin/grep -E "^${ID}\ " "${VAR_DIR}"/list.txt )"
REPO="$( /bin/awk '{print $1}'<<<"${ID_VALUES}" )"
BRANCH="$( /bin/awk '{print $2}'<<<"${ID_VALUES}" )"
BUILD_FUNCTION="$( /bin/awk '{print $3}'<<<"${ID_VALUES}" )"
URL="$( /bin/awk '{print $4}'<<<"${ID_VALUES}" )"
SECRET_TOKEN="$( /bin/awk '{print $5}'<<<"${ID_VALUES}" )"
REPO_DIR="${VAR_DIR}/${REPO}"
if [ ! -d "${REPO_DIR}" ]; then
mkdir -p "${REPO_DIR}"
fi
}
function check_interval {
CALLTIME="$( /bin/date +%s )"
if [ ! -f "${REPO_DIR}"/last.txt ];then
printf "%d\n" '0' >"${REPO_DIR}"/last.txt
fi
LAST_CALLTIME="$( <"${REPO_DIR}"/last.txt )"
INTERVAL="$(( ${CALLTIME} - ${LAST_CALLTIME} ))"
TIME_LEFT="$(( ${WANTED_INTERVAL} - ${INTERVAL} ))"
if [ ! -f "${REPO_DIR}"/waiting.txt ];then
printf "%d\n" '0' >"${REPO_DIR}"/waiting.txt
fi
WAITING="$( <"${REPO_DIR}"/waiting.txt )"
if [ "${WAITING}" == 1 ]; then
CASE='waiting'
else
if (( "${INTERVAL}" > "${WANTED_INTERVAL}" )); then
CASE='ready'
else
CASE='too_soon'
fi
fi
}
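# Example: with WANTED_INTERVAL=10, a call arriving 4s after the previous
# one and no update queued yields INTERVAL=4, CASE='too_soon', TIME_LEFT=6.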
function update {
cd "${SRC_DIR}"/"${REPO}"
printf "%s" "Git checkout: "
/usr/bin/git checkout "${BRANCH}"
printf "%s" "Git pull: "
/usr/bin/git pull
. "${VAR_DIR}"/"${BUILD_FUNCTION}"
build
rsync -qaP --del --exclude-from='.gitignore' dest/ "${WWW_DIR}"/"${URL}"/
printf "%s\n" 'Synced'
}
function update_stuff {
case "${CASE}" in
waiting)
printf "Update in queue. %d seconds left.\n" "${TIME_LEFT}"
exit 72
;;
ready)
printf "%s\n" "${CALLTIME}" >"${REPO_DIR}"/last.txt
;;
too_soon)
printf "%d\n" '1' >"${REPO_DIR}"/waiting.txt
TIME_LEFT="$(( ${WANTED_INTERVAL} - ${INTERVAL} ))"
printf "Waiting for %d seconds.\n" "${TIME_LEFT}"
sleep "${TIME_LEFT}"
;;
esac
if [ ! -f "${REPO_DIR}"/progress.txt ]; then
printf "%d\n" '0' >"${REPO_DIR}"/progress.txt
fi
progress="$(<"${REPO_DIR}"/progress.txt)"
while (( "${progress}" == '1' )); do
progress="$(<"${REPO_DIR}"/last.txt)"
printf "%s\n" 'Earlier update in progress. Waiting...'
sleep 1
done
printf "%s\n" 'Ready'
printf "%d\n" '1' >"${REPO_DIR}"/progress.txt
update
printf "%s\n" "${CALLTIME}" >"${REPO_DIR}"/last.txt
printf "%d\n" '0' >"${REPO_DIR}"/progress.txt
printf "%d\n" '0' >"${REPO_DIR}"/waiting.txt
}
identify_service
read_post
id_values
check_signature
CASE='ready'
check_interval
update_stuff
| alexh-name/git-publish | git-publish.sh | Shell | mit | 3,479 |
#export pid=`ps aux | grep python | grep hello_gevent.py | awk 'NR==1{print $2}' | cut -d' ' -f1`;kill -9 $pid
for KILLPID in `ps aux | grep 'python' | grep 'server01' | awk ' { print $2;}'`; do
kill -9 $KILLPID;
done
#ps aux | grep python | grep -v grep | awk '{print $2}' | xargs kill -9
| alexp25/d4w_app_lab | utils/rpi/app_exit.sh | Shell | mit | 296 |
#!/bin/sh
sqlite3 -echo db/test.db < db/schema_context_a.sql
| DDD-Hamburg/ddd-in-legacy-systems | scripts/setup-db.sh | Shell | mit | 61 |
#!/usr/bin/env bash
# build datatheme resources
# usage: $ bash ./publi.sh
# build & deploys datatheme resources to host s3 bucket
# usage: $ bash ./publi.sh put dev.afgo.pgyi
# dependencies:
# aws cli : http://aws.amazon.com/cli/
# nodejs : https://nodejs.org/
# bawlk : https://github.com/tesera/bawlk
datatheme_root=s3://tesera.data.themes
datatheme_path="$datatheme_root/$2"
cp_flags="--acl public-read --cache-control no-cahe"
if [ "$CI_BRANCH" != 'master' ]; then DATATHEME_NAME="$CI_BRANCH.$DATATHEME_NAME"; fi;
echo "processing $CI_BRANCH"
echo "building datapackage.json for datatheme $DATATHEME_NAME"
mkdir -p ./www
node ./build.js $DATATHEME_NAME > ./www/datapackage.json
mkdir ./www/awk ./www/rules
echo "compiling bawlk rules from datapackage.json"
bawlk rules -d ./www/datapackage.json -o ./www/rules
echo "compiling bawlk scripts from datapackage.json"
bawlk scripts -d ./www/datapackage.json -o ./www/awk
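# At this point ./www holds datapackage.json plus the generated bawlk
# validation rules (./www/rules) and awk scripts (./www/awk).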
if [ "$1" == "put" ]; then
echo "publishing datatheme resources to s3"
aws s3 cp ./www/partials/ $datatheme_path/partials --recursive --content-type text/html $cp_flags
aws s3 cp ./www/index.html $datatheme_path/index.html --content-type text/html $cp_flags
aws s3 cp ./www/datapackage.json $datatheme_path/datapackage.json --content-type application/json $cp_flags
aws s3 cp ./www/awk/ $datatheme_path/awk --recursive --content-type text/plain $cp_flags
aws s3 cp ./www/rules/ $datatheme_path/rules --recursive --content-type text/plain $cp_flags
echo "publishing complete"
fi
echo "done"
| pulsifer/datatheme-mackenzie-pmd | publi.sh | Shell | mit | 1,549 |
# This is needed to render the menu properly, according to user's actions.
# The first menu option has an index of 0; the lower an option appears in the menu, the higher its index.
selectable-update-current-index() {
# In case Arrow Down was pressed.
if [ "$1" == $SELECTABLE_ARROW_DOWN ]; then
# If there are no more options to choose from, do nothing: the current index will NOT
# be reset to 0 (zero).
if (($(($SELECTABLE_CURRENT_INDEX+1)) < $SELECTABLE_OPTIONS_AMOUNT)); then
# Increment the value.
SELECTABLE_CURRENT_INDEX=$(($SELECTABLE_CURRENT_INDEX+1))
fi
fi
# In case Arrow Up was pressed.
if [ "$1" == $SELECTABLE_ARROW_UP ]; then
# See the condition above. The current index will NOT be set to the latest element
# (option).
if (($(($SELECTABLE_CURRENT_INDEX-1)) >= 0)); then
# Decrement the value.
SELECTABLE_CURRENT_INDEX=$(($SELECTABLE_CURRENT_INDEX-1))
fi
fi
}
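# Example: with SELECTABLE_OPTIONS_AMOUNT=3 and SELECTABLE_CURRENT_INDEX=2,
# a further Arrow Down press leaves the index at 2 (no wrap-around).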
| bound1ess/selectable | pieces/update-current-index.sh | Shell | mit | 1,010 |
#!/bin/bash
# This script overcomes a yucky bug in simplescalar's GCC, which
# prevents it from working on the user filesystem (due to a problem
# with the transition to 64-bit). Luckily /tmp is implemented differently
# and doesn't have this problem, so we copy the tree there and do the make there.
TMPNAME=/tmp/SSCA2v2.2-$USER
rm -rf $TMPNAME
# make clean here to avoid confusion
make clean
echo Copying the tree to $TMPNAME so we can build it there
cp -rf ../SSCA2v2.2 $TMPNAME
# now make it in the /tmp directory
pushd $TMPNAME
make CC=/homes/phjk/simplescalar/bin/gcc AR=/homes/phjk/simplescalar/bin/sslittle-na-sstrix-ar RANLIB=/homes/phjk/simplescalar/bin/sslittle-na-sstrix-ranlib
popd
# and link the binary back here
ln -s $TMPNAME/SSCA2
| H7DE/Instruction-Level-Parallelism-CW | SSCA2v2.2/BuildForSimplescalar.sh | Shell | mit | 757 |
#!/usr/bin/env bash
# DETAILS: Invokes rsyncs with well known better options.
# CREATED: 06/29/13 16:14:34 IST
# MODIFIED: 10/24/17 14:36:30 IST
#
# AUTHOR: Ravikiran K.S., [email protected]
# LICENCE: Copyright (c) 2013, Ravikiran K.S.
#set -uvx # Warn unset vars as error, Verbose (echo each command), Enable debug mode
# if rsync gives 'command not found' error, it means that non-interactive bash
# shell on server is unable to find rsync binary. So, use --rsync-path option
# to specify exact location of rsync binary on target server.
# To backup more than just home dir, the include file determines what is to be
# backed up now. The new rsync command (now uses in and excludes and "/" as src)
# $RSYNC -va --delete --delete-excluded --exclude-from="$EXCLUDES" \
# --include-from="$INCLUDES" /$SNAPSHOT_RW/home/daily.0;
# Sync ~/scripts on both eng-shell1 and local server (local being mastercopy)
#rsync -avmz -e ssh ~/scripts/ eng-shell1:~/scripts/
# Source .bashrc.dev only if invoked as a sub-shell. Not if sourced.
[[ "$(basename rsync.sh)" == "$(basename -- $0)" && -f $HOME/.bashrc.dev ]] && { source $HOME/.bashrc.dev; }
# contains a wildcard pattern per line of files to exclude. has no entries -- sync everything
RSYNC_EXCLUDE=$CUST_CONFS/rsyncexclude
# common rsync options
# -a - sync all file perms/attributes
# -h - display output in human readable form
# -i - itemize all changes
# -m - prune empty directories (let's keep them)
# -q - not used as -q hides almost every info
# -R - not used as creates confusion. use relative path names
# -u - skip files newer on destination (don't overwrite by fault)
# -v - not used as -v is too verbose
# -W - don't run diff algorithm. algo consumes lot of CPU and unreliable
# -x - don't go outside filesystem boundaries.
# -z - compress data while sync
# -e ssh - always use ssh for authentication
# --force - for if some operation requires special privileges
# --delete - if any file is absent in source, delete it in dest
# --delete-excluded - delete any files that are excluded in RSYNC_EXCLUDE on dest
# --out-format="%i|%n|" - Display itemized changes in this format.
# --safe-links - ignore symlinks that point outside the tree
RSYNC_OPTS="-ahiuWxz -e ssh --stats --force --delete --safe-links --out-format=%i|%n"
RSYNC_OPTS+=" --log-file=$SCRPT_LOGS/rsync.log --exclude-from=$RSYNC_EXCLUDE"
#RSYNC_OPTS+=" --rsync-path=/homes/raviks/tools/bin/freebsd/rsync"
function rsync_dir()
{
[[ "$#" != "2" ]] && (usage; exit $EINVAL)
SRC_DIR=$1
DST_DIR=$2
[[ "" != "$RSYNC_DRY_RUN" ]] && RSYNC_OPTS+=" -n"
echo "[SYNC] src: $SRC_DIR dst: $DST_DIR"
run rsync $RSYNC_OPTS $SRC_DIR $DST_DIR
unset SRC_DIR DST_DIR
}
function rsync_list()
{
[[ "$#" != "3" ]] && (usage; exit $EINVAL)
# Directory paths in $LIST_FILE are included only if specified with a closing slash. Ex. pathx/pathy/pathz/.
#RSYNC_OPTS+=" --files-from=$LIST_FILE" # this option not supported on freeBSD
LIST_FILE=$1; shift;
# If remote location, dont append /. awk also works: awk '{ print substr( $0, length($0) - 1, length($0) ) }'
tmpSrc=$(echo $1 | sed 's/^.*\(.\)$/\1/')
if [ "$tmpSrc" == ":" ] || [ "$tmpSrc" == "/" ]; then SRC="$1"; else SRC="$1/"; fi
tmpDst=$(echo $2 | sed 's/^.*\(.\)$/\1/')
if [ "$tmpDst" == ":" ] || [ "$tmpDst" == "/" ]; then DST="$2"; else DST="$2/"; fi
for dir in $(cat $LIST_FILE); do
rsync_dir $SRC$dir $DST$dir
done
unset LIST_FILE && unset SRC && unset DST && unset tmpSrc && unset tmpDst
}
usage()
{
echo "usage: rsync.sh [-d|-l <list-file>|-n] <src-dir> <dst-dir>"
echo "Options:"
echo " -r - start recursive rsync between given directory pair"
echo " -l <list-file> - do recursive rsync on all files/directores listed in given file"
echo " -n - enable DRY_RUN during rsync. Gives list of changes to be done"
echo "Note: In list-file, dir path names must be terminated with a / or /."
}
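# Example invocations (hypothetical paths):
#   rsync.sh -r ~/scripts/ eng-shell1:~/scripts/
#   rsync.sh -n -l dirlist.txt ~/src eng-shell1:~/backup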
# Each shell script has to be independently testable.
# It can then be included in other files for functions.
main()
{
PARSE_OPTS="hl:rn"
local opts_found=0
while getopts ":$PARSE_OPTS" opt; do
case $opt in
[a-zA-Z0-9])
log DEBUG "-$opt was triggered, Parameter: $OPTARG"
local "opt_$opt"=1 && local "optarg_$opt"="$OPTARG"
;;
\?)
echo "Invalid option: -$OPTARG"; usage; exit $EINVAL;
;;
:)
echo "[ERROR] Option -$OPTARG requires an argument";
usage; exit $EINVAL;
;;
esac
shift $((OPTIND-1)) && OPTIND=1 && local opts_found=1;
done
if ((!opts_found)); then
usage && exit $EINVAL;
fi
((opt_n)) && { export RSYNC_DRY_RUN=TRUE; }
((opt_r)) && { rsync_dir $*; }
    ((opt_l)) && { rsync_list "$optarg_l" "$@"; }
((opt_h)) && { usage; exit 0; }
exit 0
}
if [ "$(basename -- $0)" == "$(basename rsync.sh)" ]; then
main $*
fi
# VIM: ts=4:sw=4
| rkks/scripts | bash/rsync.sh | Shell | mit | 5,109 |
#!/bin/bash
## To avoid regression ensure that all of these do not return (unknown)
# annot_t.js
printf "annot_t.js:9:21 = "
assert_ok "$FLOW" type-at-pos annot_t.js 9 21 --strip-root --pretty
# any.js
printf "any.js:3:15 = "
assert_ok "$FLOW" type-at-pos any.js 3 15 --strip-root --pretty
printf "any.js:4:2 = "
assert_ok "$FLOW" type-at-pos any.js 4 2 --strip-root --pretty
printf "any.js:4:6 = "
assert_ok "$FLOW" type-at-pos any.js 4 6 --strip-root --pretty
printf "any.js:5:5 = "
assert_ok "$FLOW" type-at-pos any.js 5 5 --strip-root --pretty
printf "any.js:7:13 = "
assert_ok "$FLOW" type-at-pos any.js 7 13 --strip-root --pretty
printf "any.js:8:5 = "
assert_ok "$FLOW" type-at-pos any.js 8 5 --strip-root --pretty
printf "any.js:8:10 = "
assert_ok "$FLOW" type-at-pos any.js 8 10 --strip-root --pretty
printf "any.js:9:10 = "
assert_ok "$FLOW" type-at-pos any.js 9 10 --strip-root --pretty
# array.js
# TODO `Array` is not populated in type_tables
# printf "array.js:3:18 = "
# assert_ok "$FLOW" type-at-pos array.js 3 18 --strip-root --pretty
# TODO `$ReadOnlyArray` is not populated in type_tables
# printf "array.js:4:30 = "
# assert_ok "$FLOW" type-at-pos array.js 4 30 --strip-root --pretty
printf "array.js:6:15 = "
assert_ok "$FLOW" type-at-pos array.js 6 15 --strip-root --pretty
printf "array.js:10:15 = "
assert_ok "$FLOW" type-at-pos array.js 10 15 --strip-root --pretty
printf "array.js:15:4 = "
assert_ok "$FLOW" type-at-pos array.js 15 4 --strip-root --pretty
printf "array.js:19:4 = "
assert_ok "$FLOW" type-at-pos array.js 19 4 --strip-root --pretty
printf "array.js:23:4 = "
assert_ok "$FLOW" type-at-pos array.js 23 4 --strip-root --pretty
# new-array.js
printf "new-array.js:3:15 = "
assert_ok "$FLOW" type-at-pos new-array.js 3 15 --strip-root --pretty
# class-0.js
printf "class-0.js:3:7 = "
assert_ok "$FLOW" type-at-pos class-0.js 3 7 --strip-root --pretty
printf "class-0.js:4:3 = "
assert_ok "$FLOW" type-at-pos class-0.js 4 3 --strip-root --pretty
printf "class-0.js:4:10 = "
assert_ok "$FLOW" type-at-pos class-0.js 4 10 --strip-root --pretty
printf "class-0.js:9:5 = "
assert_ok "$FLOW" type-at-pos class-0.js 9 5 --strip-root --pretty
printf "class-0.js:12:5 = "
assert_ok "$FLOW" type-at-pos class-0.js 12 5 --strip-root --pretty
printf "class-0.js:15:5 = "
assert_ok "$FLOW" type-at-pos class-0.js 15 5 --strip-root --pretty
printf "class-0.js:21:5 = "
assert_ok "$FLOW" type-at-pos class-0.js 21 5 --strip-root --pretty
printf "class-0.js:24:5 = "
assert_ok "$FLOW" type-at-pos class-0.js 24 5 --strip-root --pretty
#class-1.js
# TODO this is not the ideal type
printf "class-1.js:4:3 = "
assert_ok "$FLOW" type-at-pos class-1.js 4 3 --strip-root --pretty
printf "class-1.js:8:3 = "
assert_ok "$FLOW" type-at-pos class-1.js 8 3 --strip-root --pretty
#class-2.js
printf "class-2.js:4:3 = "
assert_ok "$FLOW" type-at-pos class-2.js 4 3 --strip-root --pretty
printf "class-2.js:9:9 = "
assert_ok "$FLOW" type-at-pos class-2.js 9 9 --strip-root --pretty
printf "class-2.js:10:9 = "
assert_ok "$FLOW" type-at-pos class-2.js 10 9 --strip-root --pretty
printf "class-2.js:12:7 = "
assert_ok "$FLOW" type-at-pos class-2.js 12 7 --strip-root --pretty
printf "class-2.js:13:7 = "
assert_ok "$FLOW" type-at-pos class-2.js 13 7 --strip-root --pretty
#class-3.js
printf "class-3.js:4:3 = "
assert_ok "$FLOW" type-at-pos class-3.js 4 3 --strip-root --pretty
printf "class-3.js:9:9 = "
assert_ok "$FLOW" type-at-pos class-3.js 9 9 --strip-root --pretty
printf "class-3.js:10:9 = "
assert_ok "$FLOW" type-at-pos class-3.js 10 9 --strip-root --pretty
# class-bound.js
printf "class-bound.js:4:6 = "
assert_ok "$FLOW" type-at-pos class-bound.js 4 6 --strip-root --pretty
# class-getters-setters.js
printf "class-getters-setters.js:6:7 = "
assert_ok "$FLOW" type-at-pos class-getters-setters.js 6 7 --strip-root --pretty
printf "class-getters-setters.js:9:7 = "
assert_ok "$FLOW" type-at-pos class-getters-setters.js 9 7 --strip-root --pretty
# class-poly-0.js
printf "class-poly-0.js:5:7 = "
assert_ok "$FLOW" type-at-pos class-poly-0.js 5 7 --strip-root --pretty
printf "class-poly-0.js:5:9 = "
assert_ok "$FLOW" type-at-pos class-poly-0.js 5 9 --strip-root --pretty
printf "class-poly-0.js:10:26 = "
assert_ok "$FLOW" type-at-pos class-poly-0.js 10 26 --strip-root --pretty
# TODO constructor
# printf "class-poly-0.js:11:10 = "
# assert_ok "$FLOW" type-at-pos class-poly-0.js 11 10 --strip-root --pretty
printf "class-poly-0.js:12:7 = "
assert_ok "$FLOW" type-at-pos class-poly-0.js 12 7 --strip-root --pretty
printf "class-poly-0.js:16:7 = "
assert_ok "$FLOW" type-at-pos class-poly-0.js 16 7 --strip-root --pretty
printf "class-poly-0.js:16:10 = "
assert_ok "$FLOW" type-at-pos class-poly-0.js 16 10 --strip-root --pretty
printf "class-poly-0.js:17:7 = "
assert_ok "$FLOW" type-at-pos class-poly-0.js 17 7 --strip-root --pretty
#class-poly-1.js
printf "class-poly-1.js:9:5 = "
assert_ok "$FLOW" type-at-pos class-poly-1.js 9 5 --strip-root --pretty
printf "class-poly-1.js:9:11 = "
assert_ok "$FLOW" type-at-pos class-poly-1.js 9 11 --strip-root --pretty
# class-statics.js
printf "class-statics.js:4:10 = "
assert_ok "$FLOW" type-at-pos class-statics.js 4 10 --strip-root --pretty
printf "class-statics.js:8:10 = "
assert_ok "$FLOW" type-at-pos class-statics.js 8 10 --strip-root --pretty
printf "class-statics.js:9:7 = "
assert_ok "$FLOW" type-at-pos class-statics.js 9 7 --strip-root --pretty
printf "class-statics.js:11:8 = "
assert_ok "$FLOW" type-at-pos class-statics.js 11 8 --strip-root --pretty
printf "class-statics.js:16:5 = "
assert_ok "$FLOW" type-at-pos class-statics.js 16 5 --strip-root --pretty
printf "class-statics.js:17:5 = "
assert_ok "$FLOW" type-at-pos class-statics.js 17 5 --strip-root --pretty
# NOTE here Flow infers 'this', even though this is a static member
printf "class-statics.js:20:11 = "
assert_ok "$FLOW" type-at-pos class-statics.js 20 11 --strip-root --pretty
# class-statics-poly.js
printf "class-statics-poly.js:4:10 = "
assert_ok "$FLOW" type-at-pos class-statics-poly.js 4 10 --strip-root --pretty
printf "class-statics-poly.js:8:10 = "
assert_ok "$FLOW" type-at-pos class-statics-poly.js 8 10 --strip-root --pretty
# TODO the type 'Class<A>' is not parseable when 'A' is polymorphic
printf "class-statics-poly.js:9:7 = "
assert_ok "$FLOW" type-at-pos class-statics-poly.js 9 7 --strip-root --pretty
printf "class-statics-poly.js:11:8 = "
assert_ok "$FLOW" type-at-pos class-statics-poly.js 11 8 --strip-root --pretty
printf "class-statics-poly.js:16:5 = "
assert_ok "$FLOW" type-at-pos class-statics-poly.js 16 5 --strip-root --pretty
printf "class-statics-poly.js:17:5 = "
assert_ok "$FLOW" type-at-pos class-statics-poly.js 17 5 --strip-root --pretty
# destructuring.js
printf "destructuring.js:3:6 = "
assert_ok "$FLOW" type-at-pos destructuring.js 3 6 --strip-root --pretty
printf "destructuring.js:17:13 = "
assert_ok "$FLOW" type-at-pos destructuring.js 17 13 --strip-root --pretty
# exact.js
printf "exact.js:4:6 = "
assert_ok "$FLOW" type-at-pos exact.js 4 6 --strip-root --pretty
printf "exact.js:5:13 = "
assert_ok "$FLOW" type-at-pos exact.js 5 13 --strip-root --pretty
printf "exact.js:6:13 = "
assert_ok "$FLOW" type-at-pos exact.js 6 13 --strip-root --pretty
printf "exact.js:7:13 = "
assert_ok "$FLOW" type-at-pos exact.js 7 13 --strip-root --pretty
printf "exact.js:9:17 = "
assert_ok "$FLOW" type-at-pos exact.js 9 17 --strip-root --pretty
printf "exact.js:10:7 = "
assert_ok "$FLOW" type-at-pos exact.js 10 7 --strip-root --pretty
printf "exact.js:13:13 = "
assert_ok "$FLOW" type-at-pos exact.js 13 13 --strip-root --pretty
printf "exact.js:16:13 = "
assert_ok "$FLOW" type-at-pos exact.js 16 13 --strip-root --pretty
printf "exact.js:18:6 = "
assert_ok "$FLOW" type-at-pos exact.js 18 6 --strip-root --pretty
printf "exact.js:19:6 = "
assert_ok "$FLOW" type-at-pos exact.js 19 6 --strip-root --pretty
# facebookism.js
printf "facebookism.js:3:8 = "
assert_ok "$FLOW" type-at-pos facebookism.js 3 8 --strip-root --pretty
# TODO `require`
# printf "facebookism.js:3:14 = "
# assert_ok "$FLOW" type-at-pos facebookism.js 3 14 --strip-root --pretty
# function.js
printf "function.js:4:3 = "
assert_ok "$FLOW" type-at-pos function.js 4 3 --strip-root --pretty
printf "function.js:8:3 = "
assert_ok "$FLOW" type-at-pos function.js 8 3 --strip-root --pretty
printf "function.js:12:3 = "
assert_ok "$FLOW" type-at-pos function.js 12 3 --strip-root --pretty
printf "function.js:16:3 = "
assert_ok "$FLOW" type-at-pos function.js 16 3 --strip-root --pretty
# function-poly-0.js
printf "function-poly-0.js:3:10 = "
assert_ok "$FLOW" type-at-pos function-poly-0.js 3 10 --strip-root --pretty
printf "function-poly-0.js:3:30 = "
assert_ok "$FLOW" type-at-pos function-poly-0.js 3 30 --strip-root --pretty
printf "function-poly-0.js:4:7 = "
assert_ok "$FLOW" type-at-pos function-poly-0.js 4 7 --strip-root --pretty
# function-poly-1.js
printf "function-poly-1.js:3:10 = "
assert_ok "$FLOW" type-at-pos function-poly-1.js 3 10 --strip-root --pretty
printf "function-poly-1.js:3:3 = "
assert_ok "$FLOW" type-at-pos function-poly-1.js 3 33 --strip-root --pretty
printf "function-poly-1.js:4:7 = "
assert_ok "$FLOW" type-at-pos function-poly-1.js 4 7 --strip-root --pretty
# function-poly-2.js
printf "function-poly-2.js:3:10 = "
assert_ok "$FLOW" type-at-pos function-poly-2.js 3 10 --strip-root --pretty
printf "function-poly-2.js:4:12 = "
assert_ok "$FLOW" type-at-pos function-poly-2.js 4 12 --strip-root --pretty
printf "function-poly-2.js:5:5 = "
assert_ok "$FLOW" type-at-pos function-poly-2.js 5 5 --strip-root --pretty
printf "function-poly-2.js:6:5 = "
assert_ok "$FLOW" type-at-pos function-poly-2.js 6 5 --strip-root --pretty
printf "function-poly-2.js:7:12 = "
assert_ok "$FLOW" type-at-pos function-poly-2.js 7 12 --strip-root --pretty
printf "function-poly-2.js:9:13 = "
assert_ok "$FLOW" type-at-pos function-poly-2.js 9 13 --strip-root --pretty
printf "function-poly-2.js:11:12 = "
assert_ok "$FLOW" type-at-pos function-poly-2.js 11 12 --strip-root --pretty
# function-poly-3.js
printf "function-poly-3.js:8:1 = "
assert_ok "$FLOW" type-at-pos function-poly-3.js 8 1 --strip-root --pretty
# function-poly-4.js
printf "function-poly-4.js:3:11 = "
assert_ok "$FLOW" type-at-pos function-poly-4.js 3 11 --strip-root --pretty
printf "function-poly-4.js:7:7 = "
assert_ok "$FLOW" type-at-pos function-poly-4.js 7 7 --strip-root --pretty
printf "function-poly-4.js:9:7 = "
assert_ok "$FLOW" type-at-pos function-poly-4.js 9 7 --strip-root --pretty
# function-poly-5.js
printf "function-poly-5.js:3:10 = "
assert_ok "$FLOW" type-at-pos function-poly-5.js 3 10 --strip-root --pretty
# generics.js
printf "generics.js:5:1 = "
assert_ok "$FLOW" type-at-pos generics.js 5 1 --strip-root --pretty
printf "generics.js:10:1 = "
assert_ok "$FLOW" type-at-pos generics.js 10 1 --strip-root --pretty
printf "generics.js:14:1 = "
assert_ok "$FLOW" type-at-pos generics.js 14 1 --strip-root --pretty
printf "generics.js:18:1 = "
assert_ok "$FLOW" type-at-pos generics.js 18 1 --strip-root --pretty
printf "generics.js:22:1 = "
assert_ok "$FLOW" type-at-pos generics.js 22 1 --strip-root --pretty
printf "generics.js:26:1 = "
assert_ok "$FLOW" type-at-pos generics.js 26 1 --strip-root --pretty
printf "generics.js:30:13 = "
assert_ok "$FLOW" type-at-pos generics.js 30 13 --strip-root --pretty
# implements.js
printf "implements.js:7:8 = "
assert_ok "$FLOW" type-at-pos implements.js 4 23 --strip-root --pretty
# import_lib.js
printf "import_lib.js:7:8 = "
assert_ok "$FLOW" type-at-pos import_lib.js 7 8 --strip-root --pretty
printf "import_lib.js:7:25 (--expand-json-output) = "
assert_ok "$FLOW" type-at-pos import_lib.js 7 25 --strip-root --pretty --expand-json-output
# import_lib_named.js
printf "import_lib_named.js:3:15 (--expand-json-output) = "
assert_ok "$FLOW" type-at-pos import_lib_named.js 3 15 --strip-root --pretty --expand-json-output
printf "import_lib_named.js:3:27 (--expand-json-output) = "
assert_ok "$FLOW" type-at-pos import_lib_named.js 3 27 --strip-root --pretty --expand-json-output
# interface.js
printf "interface.js:3:12 = "
assert_ok "$FLOW" type-at-pos interface.js 3 12 --strip-root --pretty
printf "interface.js:9:15 = "
assert_ok "$FLOW" type-at-pos interface.js 9 15 --strip-root --pretty
printf "interface.js:9:19 = "
assert_ok "$FLOW" type-at-pos interface.js 9 19 --strip-root --pretty
# # TODO: report specialized type
# printf "interface.js:10:6 = "
# assert_ok "$FLOW" type-at-pos interface.js 10 6 --strip-root --pretty
# printf "interface.js:11:6 = "
# assert_ok "$FLOW" type-at-pos interface.js 11 6 --strip-root --pretty
# printf "interface.js:13:6 = "
# assert_ok "$FLOW" type-at-pos interface.js 13 6 --strip-root --pretty
printf "interface.js:17:7 = "
assert_ok "$FLOW" type-at-pos interface.js 17 7 --strip-root --pretty
printf "interface.js:18:7 = "
assert_ok "$FLOW" type-at-pos interface.js 18 7 --strip-root --pretty
# declare_class.js
printf "declare_class.js:3:15 = "
assert_ok "$FLOW" type-at-pos declare_class.js 3 15 --strip-root --pretty
# mixed.js
printf "mixed.js:18:17 = "
assert_ok "$FLOW" type-at-pos mixed.js 18 17 --strip-root --pretty
# exports.js
printf "exports.js:3:24 = "
assert_ok "$FLOW" type-at-pos exports.js 3 24 --strip-root --pretty
printf "exports.js:5:25 = "
assert_ok "$FLOW" type-at-pos exports.js 5 25 --strip-root --pretty
# module-export.js
printf "module-export.js:7:13 = "
assert_ok "$FLOW" type-at-pos module-export.js 7 13 --strip-root --pretty
# module-import.js
printf "module-import.js:3:7 = "
assert_ok "$FLOW" type-at-pos module-import.js 3 7 --strip-root --pretty
# import-default.js
printf "import-default.js:3:16 = "
assert_ok "$FLOW" type-at-pos import-default.js 3 16 --strip-root --pretty
# import-typeof-class.js
printf "import-typeof-class.js:6:16 "
assert_ok "$FLOW" type-at-pos import-typeof-class.js 6 16 --strip-root --pretty --expand-json-output
printf "import-typeof-class.js:7:16 "
assert_ok "$FLOW" type-at-pos import-typeof-class.js 7 16 --strip-root --pretty --expand-json-output
# object.js
printf "object.js:3:15 = "
assert_ok "$FLOW" type-at-pos object.js 3 15 --strip-root --pretty
printf "object.js:3:19 = "
assert_ok "$FLOW" type-at-pos object.js 3 19 --strip-root --pretty
printf "object.js:3:24 = "
assert_ok "$FLOW" type-at-pos object.js 3 24 --strip-root --pretty
printf "object.js:3:29 = "
assert_ok "$FLOW" type-at-pos object.js 3 29 --strip-root --pretty
printf "object.js:3:40 = "
assert_ok "$FLOW" type-at-pos object.js 3 40 --strip-root --pretty
printf "object.js:6:5 = "
assert_ok "$FLOW" type-at-pos object.js 6 5 --strip-root --pretty
printf "object.js:6:7 = " # TODO can we do better with duplication?
assert_ok "$FLOW" type-at-pos object.js 6 7 --strip-root --pretty
printf "object.js:7:10 = "
assert_ok "$FLOW" type-at-pos object.js 7 10 --strip-root --pretty
printf "object.js:7:12 = "
assert_ok "$FLOW" type-at-pos object.js 7 12 --strip-root --pretty
printf "object.js:8:14 = "
assert_ok "$FLOW" type-at-pos object.js 8 14 --strip-root --pretty
printf "object.js:8:16 = "
assert_ok "$FLOW" type-at-pos object.js 8 16 --strip-root --pretty
printf "object.js:9:18 = "
assert_ok "$FLOW" type-at-pos object.js 9 18 --strip-root --pretty
printf "object.js:9:34 = "
assert_ok "$FLOW" type-at-pos object.js 9 34 --strip-root --pretty
printf "object.js:15:3 = "
assert_ok "$FLOW" type-at-pos object.js 15 3 --strip-root --pretty
printf "object.js:16:3 = "
assert_ok "$FLOW" type-at-pos object.js 16 3 --strip-root --pretty
printf "object.js:19:3 = "
assert_ok "$FLOW" type-at-pos object.js 19 3 --strip-root --pretty
printf "object.js:19:7 = "
assert_ok "$FLOW" type-at-pos object.js 19 7 --strip-root --pretty
printf "object.js:20:7 = "
assert_ok "$FLOW" type-at-pos object.js 20 7 --strip-root --pretty
printf "object.js:21:7 = "
assert_ok "$FLOW" type-at-pos object.js 21 7 --strip-root --pretty
printf "object.js:22:7 = "
assert_ok "$FLOW" type-at-pos object.js 22 7 --strip-root --pretty
printf "object.js:35:1 = "
assert_ok "$FLOW" type-at-pos object.js 35 1 --strip-root --pretty
# object-resolution.js
printf "object-resolution.js:5:2 = "
assert_ok "$FLOW" type-at-pos object-resolution.js 5 2 --strip-root --pretty
printf "object-resolution.js:10:2 = "
assert_ok "$FLOW" type-at-pos object-resolution.js 10 2 --strip-root --pretty
printf "object-resolution.js:13:5 = "
assert_ok "$FLOW" type-at-pos object-resolution.js 13 5 --strip-root --pretty
# optional.js
printf "optional.js:4:10 = "
assert_ok "$FLOW" type-at-pos optional.js 4 10 --strip-root --pretty
printf "optional.js:7:2 = "
assert_ok "$FLOW" type-at-pos optional.js 7 2 --strip-root --pretty
printf "optional.js:10:11 = "
assert_ok "$FLOW" type-at-pos optional.js 10 11 --strip-root --pretty
printf "optional.js:10:14 = "
assert_ok "$FLOW" type-at-pos optional.js 10 14 --strip-root --pretty
printf "optional.js:14:10 = "
assert_ok "$FLOW" type-at-pos optional.js 14 10 --strip-root --pretty
# predicates.js
# printf "predicates.js:4:12 (null) = "
# assert_ok "$FLOW" type-at-pos predicates.js 4 12 --strip-root --pretty
printf "predicates.js - undefined: "
assert_ok "$FLOW" type-at-pos predicates.js 5 12 --strip-root --pretty
printf "predicates.js - Array: "
assert_ok "$FLOW" type-at-pos predicates.js 6 6 --strip-root --pretty
printf "predicates.js - isArray: "
assert_ok "$FLOW" type-at-pos predicates.js 6 15 --strip-root --pretty
printf "predicates.js - y (refined obj): "
assert_ok "$FLOW" type-at-pos predicates.js 8 5 --strip-root --pretty
printf "predicates.js - if (y.FOO) obj: "
assert_ok "$FLOW" type-at-pos predicates.js 9 5 --strip-root --pretty
printf "predicates.js - if (y.FOO) prop: "
assert_ok "$FLOW" type-at-pos predicates.js 9 8 --strip-root --pretty
printf "predicates.js - if (y.FOO == '') obj: "
assert_ok "$FLOW" type-at-pos predicates.js 10 5 --strip-root --pretty
printf "predicates.js - if (y.FOO == '') prop: "
assert_ok "$FLOW" type-at-pos predicates.js 10 8 --strip-root --pretty
printf "predicates.js - if (y.FOO === '') obj: "
assert_ok "$FLOW" type-at-pos predicates.js 11 5 --strip-root --pretty
printf "predicates.js - if (y.FOO === '') prop: "
assert_ok "$FLOW" type-at-pos predicates.js 11 8 --strip-root --pretty
printf "predicates.js - if (y.FOO == null) prop: "
assert_ok "$FLOW" type-at-pos predicates.js 12 8 --strip-root --pretty
printf "predicates.js - if (y.FOO == undefined) prop: "
assert_ok "$FLOW" type-at-pos predicates.js 13 8 --strip-root --pretty
printf "predicates.js - if (Array.isArray(y.FOO)): "
assert_ok "$FLOW" type-at-pos predicates.js 14 22 --strip-root --pretty
# react_component.js
printf "react_component.js:3:9 = "
assert_ok "$FLOW" type-at-pos react_component.js 3 9 --strip-root --pretty
printf "react_component.js:13:33 = "
assert_ok "$FLOW" type-at-pos react_component.js 13 33 --strip-root --pretty
printf "react_component.js:18:17 = "
assert_ok "$FLOW" type-at-pos react_component.js 18 17 --strip-root --pretty
printf "react_component.js:31:7 = "
assert_ok "$FLOW" type-at-pos react_component.js 31 7 --strip-root --pretty --expand-json-output
printf "react_component.js:32:13 = "
assert_ok "$FLOW" type-at-pos react_component.js 32 13 --strip-root --pretty --expand-json-output
printf "react_component.js:32:29 = "
assert_ok "$FLOW" type-at-pos react_component.js 32 29 --strip-root --pretty --expand-json-output
# react.js
printf "react.js:2:7 = "
assert_ok "$FLOW" type-at-pos react.js 2 7 --strip-root --pretty
# recursive.js
printf "recursive.js:3:25 = "
assert_ok "$FLOW" type-at-pos recursive.js 3 25 --strip-root --pretty
printf "recursive.js:6:11 = "
assert_ok "$FLOW" type-at-pos recursive.js 6 11 --strip-root --pretty
printf "recursive.js:13:12 = "
assert_ok "$FLOW" type-at-pos recursive.js 13 12 --strip-root --pretty
printf "recursive.js:23:12 = "
assert_ok "$FLOW" type-at-pos recursive.js 23 12 --strip-root --pretty
printf "recursive.js:38:2 = "
assert_ok "$FLOW" type-at-pos recursive.js 38 2 --strip-root --pretty
printf "recursive.js:41:17 = "
assert_ok "$FLOW" type-at-pos recursive.js 41 17 --strip-root --pretty
printf "recursive.js:58:1 = "
assert_ok "$FLOW" type-at-pos recursive.js 58 1 --strip-root --pretty
printf "recursive.js:60:6 = "
assert_ok "$FLOW" type-at-pos recursive.js 60 6 --strip-root --pretty
printf "recursive.js:60:31 = "
assert_ok "$FLOW" type-at-pos recursive.js 60 31 --strip-root --pretty
# refinement.js
printf "refinement.js:7:25 = "
assert_ok "$FLOW" type-at-pos refinement.js 7 25 --strip-root --pretty
printf "refinement.js:8:25 = "
assert_ok "$FLOW" type-at-pos refinement.js 8 25 --strip-root --pretty
# require-class.js
printf "require-class.js:5:16 = "
assert_ok "$FLOW" type-at-pos require-class.js 5 16 --strip-root --expand-json-output --pretty
printf "require-class.js:6:16 = "
assert_ok "$FLOW" type-at-pos require-class.js 6 16 --strip-root --expand-json-output --pretty
# test.js
printf "test.js:5:1 = "
assert_ok "$FLOW" type-at-pos test.js 5 1 --strip-root --pretty
printf "test.js:8:7 = "
assert_ok "$FLOW" type-at-pos test.js 8 7 --strip-root --pretty
printf "test.js:10:7 = "
assert_ok "$FLOW" type-at-pos test.js 10 7 --strip-root --pretty
printf "test.js:12:7 = "
assert_ok "$FLOW" type-at-pos test.js 12 7 --strip-root --pretty
printf "test.js:14:7 = "
assert_ok "$FLOW" type-at-pos test.js 14 7 --strip-root --pretty
# templates.js
# NOTE: not supported any more
# printf "templates.js:2:7 = "
# assert_ok "$FLOW" type-at-pos templates.js 2 7 --strip-root --pretty
# trycatch.js
# TODO track type reaching catch variable
# printf "trycatch.js:5:10 = "
# assert_ok "$FLOW" type-at-pos trycatch.js 5 10 --strip-root --pretty
# type-destructor.js
printf "type-destructor.js:3:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 3 6 --strip-root --pretty
printf "type-destructor.js:4:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 4 6 --strip-root --pretty
printf "type-destructor.js:5:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 5 6 --strip-root --pretty
printf "type-destructor.js:8:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 8 6 --strip-root --pretty
printf "type-destructor.js:10:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 10 6 --strip-root --pretty
printf "type-destructor.js:12:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 12 6 --strip-root --pretty
printf "type-destructor.js:13:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 13 6 --strip-root --pretty
printf "type-destructor.js:15:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 15 6 --strip-root --pretty
printf "type-destructor.js:16:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 16 6 --strip-root --pretty
printf "type-destructor.js:17:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 17 6 --strip-root --pretty
printf "type-destructor.js:19:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 19 6 --strip-root --pretty
printf "type-destructor.js:20:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 20 6 --strip-root --pretty
printf "type-destructor.js:21:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 21 6 --strip-root --pretty
printf "type-destructor.js:23:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 23 6 --strip-root --pretty
printf "type-destructor.js:27:5 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 27 5 --strip-root --pretty
printf "type-destructor.js:28:5 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 28 5 --strip-root --pretty
printf "type-destructor.js:29:5 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 28 5 --strip-root --pretty
printf "type-destructor.js:33:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 33 6 --strip-root --pretty
printf "type-destructor.js:34:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 34 6 --strip-root --pretty
printf "type-destructor.js:36:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 36 6 --strip-root --pretty
printf "type-destructor.js:37:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 37 6 --strip-root --pretty
printf "type-destructor.js:41:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 41 6 --strip-root --pretty
printf "type-destructor.js:42:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 42 6 --strip-root --pretty
printf "type-destructor.js:44:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 44 6 --strip-root --pretty
printf "type-destructor.js:45:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 45 6 --strip-root --pretty
printf "type-destructor.js:47:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 47 6 --strip-root --pretty
printf "type-destructor.js:48:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 48 6 --strip-root --pretty
printf "type-destructor.js:62:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 62 6 --strip-root --pretty
printf "type-destructor.js:63:6 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 63 6 --strip-root --pretty
printf "type-destructor.js:68:13 = "
assert_ok "$FLOW" type-at-pos type-destructor.js 68 13 --strip-root --pretty
# unions.js
printf "unions.js:9:3 = "
assert_ok "$FLOW" type-at-pos unions.js 9 3 --strip-root --pretty
printf "unions.js:15:2 = "
assert_ok "$FLOW" type-at-pos unions.js 15 2 --strip-root --pretty
printf "unions.js:24:3 = "
assert_ok "$FLOW" type-at-pos unions.js 24 3 --strip-root --pretty
printf "unions.js:43:3 = "
assert_ok "$FLOW" type-at-pos unions.js 43 3 --strip-root --pretty
printf "unions.js:44:3 = "
assert_ok "$FLOW" type-at-pos unions.js 44 3 --strip-root --pretty
printf "unions.js:49:1 = "
assert_ok "$FLOW" type-at-pos unions.js 49 1 --strip-root --pretty
printf "unions.js:52:1 = "
assert_ok "$FLOW" type-at-pos unions.js 52 1 --strip-root --pretty
printf "unions.js:57:5 = "
assert_ok "$FLOW" type-at-pos unions.js 57 5 --strip-root --pretty
printf "unions.js:59:18 = "
assert_ok "$FLOW" type-at-pos unions.js 59 18 --strip-root --pretty
# opaque.js
printf "opaque.js:3:20 = "
assert_ok "$FLOW" type-at-pos opaque.js 3 20 --strip-root --pretty
printf "opaque.js:4:14 = "
assert_ok "$FLOW" type-at-pos opaque.js 4 14 --strip-root --pretty
printf "opaque.js:4:19 = "
assert_ok "$FLOW" type-at-pos opaque.js 4 19 --strip-root --pretty
printf "opaque.js:6:22 = "
assert_ok "$FLOW" type-at-pos opaque.js 6 22 --strip-root --pretty
printf "opaque.js:7:13 = "
assert_ok "$FLOW" type-at-pos opaque.js 7 13 --strip-root --pretty
printf "opaque.js:7:18 = "
assert_ok "$FLOW" type-at-pos opaque.js 7 18 --strip-root --pretty
printf "opaque.js:9:22 = "
assert_ok "$FLOW" type-at-pos opaque.js 9 22 --strip-root --pretty
printf "opaque.js:10:13 = "
assert_ok "$FLOW" type-at-pos opaque.js 10 13 --strip-root --pretty
printf "opaque.js:10:18 = "
assert_ok "$FLOW" type-at-pos opaque.js 10 18 --strip-root --pretty
printf "opaque.js:12:14 = "
assert_ok "$FLOW" type-at-pos opaque.js 12 14 --strip-root --pretty
printf "opaque.js:13:14 = "
assert_ok "$FLOW" type-at-pos opaque.js 13 14 --strip-root --pretty
printf "opaque.js:13:19 = "
assert_ok "$FLOW" type-at-pos opaque.js 13 19 --strip-root --pretty
printf "opaque.js:15:22 = "
assert_ok "$FLOW" type-at-pos opaque.js 15 22 --strip-root --pretty
printf "opaque.js:16:14 = "
assert_ok "$FLOW" type-at-pos opaque.js 16 14 --strip-root --pretty
printf "opaque.js:16:19 = "
assert_ok "$FLOW" type-at-pos opaque.js 16 19 --strip-root --pretty
printf "opaque.js:19:14 = "
assert_ok "$FLOW" type-at-pos opaque.js 19 14 --strip-root --pretty
printf "opaque.js:19:22 = "
assert_ok "$FLOW" type-at-pos opaque.js 19 22 --strip-root --pretty
printf "opaque.js:20:16 = "
assert_ok "$FLOW" type-at-pos opaque.js 20 16 --strip-root --pretty
printf "opaque.js:20:34 = "
assert_ok "$FLOW" type-at-pos opaque.js 20 34 --strip-root --pretty
printf "opaque.js:21:19 = "
assert_ok "$FLOW" type-at-pos opaque.js 21 19 --strip-root --pretty
printf "opaque.js:21:28 = "
assert_ok "$FLOW" type-at-pos opaque.js 21 28 --strip-root --pretty
printf "opaque.js:24:7 = "
assert_ok "$FLOW" type-at-pos opaque.js 24 7 --strip-root --pretty
# optional_chaining.js
printf "optional_chaining.js:16:7 = "
assert_ok "$FLOW" type-at-pos optional_chaining.js 16 7 --strip-root --pretty
printf "optional_chaining.js:16:11 = "
assert_ok "$FLOW" type-at-pos optional_chaining.js 16 11 --strip-root --pretty
printf "optional_chaining.js:16:16 = "
assert_ok "$FLOW" type-at-pos optional_chaining.js 16 16 --strip-root --pretty
printf "optional_chaining.js:16:20 = "
assert_ok "$FLOW" type-at-pos optional_chaining.js 16 20 --strip-root --pretty
printf "optional_chaining.js:16:24 = "
assert_ok "$FLOW" type-at-pos optional_chaining.js 16 24 --strip-root --pretty
# type-alias.js
printf "type-alias.js:3:6 = "
assert_ok "$FLOW" type-at-pos type-alias.js 3 6 --strip-root --pretty
printf "type-alias.js:4:6 = "
assert_ok "$FLOW" type-at-pos type-alias.js 4 6 --strip-root --pretty
printf "type-alias.js:5:6 = "
assert_ok "$FLOW" type-at-pos type-alias.js 5 6 --strip-root --pretty
printf "type-alias.js:6:6 = "
assert_ok "$FLOW" type-at-pos type-alias.js 6 6 --strip-root --pretty
printf "type-alias.js:7:6 = "
assert_ok "$FLOW" type-at-pos type-alias.js 7 6 --strip-root --pretty
printf "type-alias.js:7:6 (--expand-type-aliases) = "
assert_ok "$FLOW" type-at-pos type-alias.js 7 6 --strip-root --pretty --expand-type-aliases
printf "type-alias.js:8:6 = "
assert_ok "$FLOW" type-at-pos type-alias.js 8 6 --strip-root --pretty
printf "type-alias.js:12:12 "
assert_ok "$FLOW" type-at-pos type-alias.js 12 12 --strip-root --pretty
printf "type-alias.js:12:29 "
assert_ok "$FLOW" type-at-pos type-alias.js 12 29 --strip-root --pretty
# Test interaction with RPolyTest
printf "type-alias.js:15:8 "
assert_ok "$FLOW" type-at-pos type-alias.js 15 8 --strip-root --pretty
printf "type-alias.js:16:8 "
assert_ok "$FLOW" type-at-pos type-alias.js 16 8 --strip-root --pretty
printf "type-alias.js:17:8 "
assert_ok "$FLOW" type-at-pos type-alias.js 17 8 --strip-root --pretty
printf "type-alias.js:18:8 "
assert_ok "$FLOW" type-at-pos type-alias.js 18 8 --strip-root --pretty
printf "type-alias.js:19:8 "
assert_ok "$FLOW" type-at-pos type-alias.js 19 8 --strip-root --pretty
printf "type-alias.js:20:8 "
assert_ok "$FLOW" type-at-pos type-alias.js 20 8 --strip-root --pretty
printf "type-alias.js:24:6 "
assert_ok "$FLOW" type-at-pos type-alias.js 24 6 --strip-root --pretty --expand-type-aliases
printf "type-alias.js:25:6 "
assert_ok "$FLOW" type-at-pos type-alias.js 25 6 --strip-root --pretty --expand-type-aliases
printf "type-alias.js:27:6 "
assert_ok "$FLOW" type-at-pos type-alias.js 27 6 --strip-root --pretty --expand-type-aliases
printf "type-alias.js:29:6 "
assert_ok "$FLOW" type-at-pos type-alias.js 29 6 --strip-root --pretty --expand-type-aliases
printf "type-alias.js:31:6 "
assert_ok "$FLOW" type-at-pos type-alias.js 31 6 --strip-root --pretty --expand-type-aliases
printf "type-alias.js:34:6 "
assert_ok "$FLOW" type-at-pos type-alias.js 34 6 --strip-root --pretty --expand-json-output
| JonathanUsername/flow | tests/type-at-pos/test.sh | Shell | mit | 30,990 |
#!/bin/sh
#
# MediaWiki Setup Script
#
# This script will install and configure MediaWiki on
# an Ubuntu 14.04 droplet
export DEBIAN_FRONTEND=noninteractive;
# Generate root and wordpress mysql passwords
rootmysqlpass=`dd if=/dev/urandom bs=1 count=32 2>/dev/null | base64 -w 0 | rev | cut -b 2- | rev | tr -dc 'a-zA-Z0-9'`;
mwmysqlpass=`dd if=/dev/urandom bs=1 count=32 2>/dev/null | base64 -w 0 | rev | cut -b 2- | rev | tr -dc 'a-zA-Z0-9'`;
# Write passwords to file
echo "MySQL Passwords for this droplet " > /etc/motd.tail;
echo "-----------------------------------" >> /etc/motd.tail;
echo "Root MySQL Password: $rootmysqlpass" >> /etc/motd.tail;
echo "MediaWiki MySQL Database: mwdb" >> /etc/motd.tail;
echo "Mediawiki MySQL Username: mwsql" >> /etc/motd.tail;
echo "Mediawiki MySQL Password: $mwmysqlpass" >> /etc/motd.tail;
echo "-----------------------------------" >> /etc/motd.tail;
echo "You can remove this information with 'rm -f /etc/motd.tail'" >> /etc/motd.tail;
apt-get update;
apt-get -y install apache2 mysql-server libapache2-mod-auth-mysql php5-mysql php5 libapache2-mod-php5 php5-mcrypt php5-gd php5-intl php-pear php5-dev make libpcre3-dev php-apc;
# Set up database user
/usr/bin/mysqladmin -u root -h localhost create mwdb;
/usr/bin/mysqladmin -u root -h localhost password $rootmysqlpass;
/usr/bin/mysql -uroot -p$rootmysqlpass -e "CREATE USER mwsql@localhost IDENTIFIED BY '"$mwmysqlpass"'";
/usr/bin/mysql -uroot -p$rootmysqlpass -e "GRANT ALL PRIVILEGES ON mwdb.* TO mwsql@localhost";
rm -f /var/www/html/index.html;
wget http://releases.wikimedia.org/mediawiki/1.25/mediawiki-1.25.1.tar.gz -O /root/mediawiki.tar.gz;
cd /root;
tar -zxf /root/mediawiki.tar.gz;
cp -Rf /root/mediawiki-1.25.1/* /var/www/html/.;
#rm /root/mediawiki.tar.gz;
#rm -Rf /root/mediawiki-1.25.1;
chown -Rf www-data.www-data /var/www/html;
service apache2 restart;
cat /etc/motd.tail > /var/run/motd.dynamic;
chmod 0660 /var/run/motd.dynamic;
| digitalocean/do_user_scripts | Ubuntu-14.04/cms/mediawiki.sh | Shell | mit | 1,955 |
# Update the X font indexes:
if [ -x /usr/bin/mkfontdir ]; then
/usr/bin/mkfontscale /usr/share/fonts/TTF
/usr/bin/mkfontdir /usr/share/fonts/TTF
/usr/bin/mkfontscale /usr/share/fonts/OTF
/usr/bin/mkfontdir /usr/share/fonts/OTF
fi
if [ -x /usr/bin/fc-cache ]; then
/usr/bin/fc-cache -f
fi
| panosmdma/SlackOnly-SlackBuilds | system/linux-libertine-fonts/doinst.sh | Shell | mit | 301 |
#!/bin/sh -e
usage()
{
echo "Usage: ${0} [--structure-only] DATABASE_NAME"
}
STRUCTURE_ONLY=false
if [ "${1}" = --structure-only ]; then
STRUCTURE_ONLY=true
shift
fi
DATABASE_NAME="${1}"
if [ "${DATABASE_NAME}" = "" ]; then
usage
exit 1
fi
if [ "${STRUCTURE_ONLY}" = true ]; then
FILE="${DATABASE_NAME}-structure.sql"
else
FILE="${DATABASE_NAME}-full.sql"
fi
if [ -f "${FILE}" ]; then
echo "File exists: ${FILE}"
exit 1
fi
if [ "${STRUCTURE_ONLY}" = true ]; then
mysqldump --user=root --password --protocol=tcp --no-data --databases "${DATABASE_NAME}" > "${FILE}"
else
mysqldump --user=root --password --protocol=tcp --databases "${DATABASE_NAME}" > "${FILE}"
fi
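# Examples (hypothetical database name):
#   ./backup-database.sh --structure-only shop   # writes shop-structure.sql
#   ./backup-database.sh shop                    # writes shop-full.sql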
| FunTimeCoding/mysql-tools | bin/backup-database.sh | Shell | mit | 717 |
#!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")" \
&& . "utils.sh"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
tohome() {
sourceFile="$(cd .. && pwd)/$1"
targetFile="$HOME/.$(printf "%s" "$1" | sed "s/.*\/\(.*\)/\1/g")"
if [ ! -e "$targetFile" ] || $skipQuestions; then
execute \
"ln -fs $sourceFile $targetFile" \
"$targetFile → $sourceFile"
elif [ "$(readlink "$targetFile")" == "$sourceFile" ]; then
print_success "$targetFile → $sourceFile"
else
if ! $skipQuestions; then
ask_for_confirmation "'$targetFile' already exists, do you want to overwrite it?"
if answer_is_yes; then
rm -rf "$targetFile"
execute \
"ln -fs $sourceFile $targetFile" \
"$targetFile → $sourceFile"
else
print_error "$targetFile → $sourceFile"
fi
fi
fi
}
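# Example: tohome "shell/bashrc" links $HOME/.bashrc -> <dotfiles>/shell/bashrc,
# asking for confirmation before replacing an existing, non-matching file.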
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
toother() {
sourceFile="$(cd .. && pwd)/$1"
targetFile="$HOME/$2"
targetdir=$(dirname "$targetFile")
mkdir -p "$targetdir"
if [ ! -e "$targetFile" ] || $skipQuestions; then
execute \
"ln -fs $sourceFile $targetFile" \
"$targetFile → $sourceFile"
elif [ "$(readlink "$targetFile")" == "$sourceFile" ]; then
print_success "$targetFile → $sourceFile"
else
if ! $skipQuestions; then
ask_for_confirmation "'$targetFile' already exists, do you want to overwrite it?"
if answer_is_yes; then
rm -rf "$targetFile"
execute \
"ln -fs $sourceFile $targetFile" \
"$targetFile → $sourceFile"
else
print_error "$targetFile → $sourceFile"
fi
fi
fi
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
create_symlinks() {
declare -a FILES_TO_SYMLINK=(
"shell/bash_logout"
"shell/bash_profile"
"shell/bashrc"
"shell/tmux.conf"
"git/gitconfig"
"conky/conkyrc"
"R/Rprofile"
"zsh/zshrc"
)
local i=""
local sourceFile=""
local targetFile=""
local skipQuestions=false
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
skip_questions "$@" \
&& skipQuestions=true
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
for i in "${FILES_TO_SYMLINK[@]}"; do
tohome "$i"
done
toother "xfce4/terminal/terminalrc" ".config/xfce4/terminal/terminalrc"
toother "xfce4/panel/whiskermenu-1.rc" ".config/xfce4/panel/whiskermenu-1.rc"
toother "sublime-text/Package\ Control.sublime-settings" ".config/sublime-text-3/Packages/User/Package\ Control.sublime-settings"
toother "sublime-text/Preferences.sublime-settings" ".config/sublime-text-3/Packages/User/Preferences.sublime-settings"
toother "sublime-text/bash.sublime-build" ".config/sublime-text-3/Packages/User/bash.sublime-build"
toother "sublime-text/xetex.sublime-build" ".config/sublime-text-3/Packages/User/xetex.sublime-build"
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
main() {
print_info "Create symbolic links"
create_symlinks "$@"
}
main "$@"
| EDiLD/dotfiles | src/os/create_symbolic_links.sh | Shell | mit | 3,621 |
#!/bin/bash
## This script installs the irace package, sets up the environment and launches
## irace. Then compresses its output as a tar.gz.
# SCENARIO is the name of an irace scenario file
SCENARIO=$1
# RUN is the run number to distinguish replications of irace
RUN=$2
shift 2
# RUN in condor starts at 0
let SEED=1234567+RUN
IRACE_PARAMS="--scenario scenario-${SCENARIO}.txt --debug-level 3 --parallel 24 --seed ${SEED}"
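# Example (hypothetical scenario): './run.sh acotsp 3' reads scenario-acotsp.txt,
# runs irace with seed 1234570 and writes its logs under ./execdir.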
#tar axf condor-input.tar.gz
RPACKAGE="irace.tar.gz"
# install irace
if [ ! -r $RPACKAGE ]; then
echo "cannot read $RPACKAGE"
exit 1
fi
RLIBDIR="$(pwd)/R/"
mkdir -p $RLIBDIR
R CMD INSTALL $RPACKAGE --library=$RLIBDIR
export R_LIBS="$RLIBDIR:$R_LIBS"
irace="$(pwd)/R/irace/bin/irace"
if [ ! -x $irace ]; then
echo "cannot execute $irace"
exit 1
fi
export PATH="$(pwd)/":${PATH}
#cat /proc/cpuinfo
#echo "$irace --scenario scenario-${1}.txt --exec-dir=./execdir --debug-level 2 --parallel 8"
mkdir -p execdir && $irace --exec-dir=./execdir ${IRACE_PARAMS} 1> execdir/irace.stdout.txt 2> execdir/irace.stderr.txt
#| xz - > execdir/irace.stdout.xz
#cd ..
#tar acf result.tar.gz irace/execdir
#ls ./execdir
#cat ./execdir/c1-1.stderr
#cd .. && tar acf result.tar.gz irace
#ls ../result.tar.gz
| matthieu-vergne/jMetal | jmetal-experimental/src/main/resources/irace/run.sh | Shell | mit | 1,221 |
source ~/.functions_colors_shell.zsh
# functions and load-onces stuff for the environment
echo "function config"
source ~/.functions_shell.sh
source ~/.functions_colors.sh
source ~/.functions_dev.sh
source ~/.functions_osx.sh
source ~/.functions_graphics.sh
# Vim IDE settings
source ~/.bash_vim_append
test -e "${HOME}/.dotfiles/mac/iterm2_shell_integration.sh" && source "${HOME}/.dotfiles/mac/iterm2_shell_integration.sh"
alias mkdir=/bin/mkdir
alias ll='ls -alGh'
export PATH="$HOME/.dotfiles/bin:$PATH"
| NewAlexandria/dotfiles | zsh/functions.zsh | Shell | mit | 514 |
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
  # use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/PopupDialog/PopupDialog.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/PopupDialog/PopupDialog.framework"
fi
| monadis/PopupDialog | Example/Pods/Target Support Files/Pods-PopupDialog_Example/Pods-PopupDialog_Example-frameworks.sh | Shell | mit | 3,621 |
#!/bin/bash
mkdir $PREFIX/share
mkdir $PREFIX/share/gdal
cp data/* $PREFIX/share/gdal
cp LICENSE.TXT $PREFIX/share/gdal
mkdir $PREFIX/bin # HACK to get post-link script to copy.
| jjhelmus/conda_recipes_testing | gdal/gdal-data_1.10.1/build.sh | Shell | mit | 182 |
#!/usr/bin/env bash
PATH=/opt/usao/moodle3/bin:/usr/local/bin:/usr/bin:/bin:/sbin:$PATH
## Require arguments
if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ] ; then
cat <<USAGE
moodle3_migrate.sh migrates a site between hosts.
Usage: moodle3_migrate.sh \$dest_moodledir \$src_moodlehost \$src_cfgdir
\$dest_moodledir local dir for Moodle site (eg. /srv/example).
\$src_moodlehost host of site to migrate
\$src_cfgdir remote dir of site to migrate on \$src_moodlehost
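Example (hypothetical host and paths):
  moodle3_migrate.sh /srv/example moodle-old.example.com /var/www/html/moodle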
USAGE
exit 1;
fi
source /opt/usao/moodle3/etc/moodle3_conf.sh
dest_moodledir=${1}
src_moodlehost=${2}
src_cfgdir=${3}
dest_basename=$(basename "$dest_moodledir")
# Read src config file
src_cfg=$(ssh ${src_moodlehost} cat ${src_cfgdir}/config.php)
# Set vars from it.
src_dbhost=`echo "${src_cfg}" | grep '^$CFG->dbhost' | cut -d "=" -f 2 | cut -d ';' -f 1 | xargs`
src_dbname=`echo "${src_cfg}" | grep '^$CFG->dbname' | cut -d "=" -f 2 | cut -d ';' -f 1 | xargs`
src_dbuser=`echo "${src_cfg}" | grep '^$CFG->dbuser' | cut -d "=" -f 2 | cut -d ';' -f 1 | xargs`
src_dbpass=`echo "${src_cfg}" | grep '^$CFG->dbpass' | cut -d "=" -f 2 | cut -d ';' -f 1 | xargs`
src_wwwroot=`echo "${src_cfg}" | grep '^$CFG->wwwroot' | cut -d "=" -f 2 | cut -d ';' -f 1 | xargs`
src_dataroot=`echo "${src_cfg}" | grep '^$CFG->dataroot' | cut -d "=" -f 2 | cut -d ';' -f 1 | xargs`
# Sync over the moodledata
rsync -avz ${src_moodlehost}:${src_dataroot}/ ${dest_moodledir}/moodledata/
# Dump src db to local file in dest_moodledir
ssh ${src_moodlehost} mysqldump --single-transaction --lock-tables=false --allow-keywords --opt -h${src_dbhost} -u${src_dbuser} -p${src_dbpass} ${src_dbname} >${dest_moodledir}/db/moodle3_${src_moodlehost}_dump.sql
# rewrite the site URL (wwwroot) in the DB dump
sed -e "s#${src_wwwroot}#${moodle3wwwroot}#g" ${dest_moodledir}/db/moodle3_${src_moodlehost}_dump.sql > ${dest_moodledir}/db/moodle3_${dest_basename}_dump.sql
# Import our newly munged database
/opt/usao/moodle3/bin/moodle3_importdb.sh ${dest_moodledir}
# Upgrade our db to the installed codebase
/opt/usao/moodle3/bin/moodle3_upgrade.sh ${dest_moodledir}
| USAO/ansible-role-moodle3 | files/moodle3_migrate.sh | Shell | mit | 2,116 |
#!/bin/bash
# if the config file doesn't exist (it won't until the user changes a setting) then copy the default config file
if [[ ! -f /config/core.conf ]]; then
echo "[info] Deluge config file doesn't exist, copying default..."
cp /home/nobody/deluge/core.conf /config/
else
echo "[info] Deluge config file already exists, skipping copy"
fi
echo "[info] Starting Deluge daemon..."
/usr/bin/deluged -d -c /config -L info -l /config/deluged.log | Toilal/stealthbox | .docker/deluge/home/deluge.sh | Shell | mit | 438 |
#!/usr/bin/env bash
phpize
./configure
make
sudo make install
make clean
php --re focusphp
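# 'php --re focusphp' dumps the extension's reflection info, which confirms
# that the freshly built module actually loads.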
| mylxsw/FocusPHP-Ext | startup.sh | Shell | mit | 92 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/PIC24_Dev_Board_Template.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=PIC24_Dev_Board_Template.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=pic24devboardtemplate.x/
# Functions
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/pic24devboardtemplate.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/pic24devboardtemplate.x.tar
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/pic24devboardtemplate.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
| briandorey/PIC24-Dev-Board | Firmware/Template/PIC24 Dev Board Template.X/nbproject/Package-default.bash | Shell | mit | 1,459 |
#!/usr/bin/env bash
CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SESSION_NAME="$1"
source "$CURRENT_DIR/helpers.sh"
dismiss_session_list_page_from_view() {
tmux send-keys C-c
}
session_name_not_provided() {
[ -z "$SESSION_NAME" ]
}
main() {
if session_name_not_provided; then
dismiss_session_list_page_from_view
exit 0
fi
if session_exists; then
dismiss_session_list_page_from_view
tmux switch-client -t "$SESSION_NAME"
else
"$CURRENT_DIR/show_goto_prompt.sh" "$SESSION_NAME"
fi
}
main
| disser/tmux-sessionist | scripts/switch_or_loop.sh | Shell | mit | 528 |
#!/bin/bash
STEP=$1
TEST=$2
case "$STEP" in
install)
echo "Installing..."
if [ -d vendor ]; then
chmod 777 -R vendor
rm -r vendor
fi
COMPOSER=dev.json composer install
;;
script)
echo "Run tests...";
if [ ! -d vendor ]; then
echo "Application not installed. Tests stopped. Exit with code 1"
exit 1
fi
case "$TEST" in
unit)
echo "Run phpunit --verbose --testsuite=unit...";
php vendor/bin/phpunit --verbose --testsuite=unit
;;
phpcs)
echo "Run phpcs --encoding=utf-8 --extensions=php --standard=psr2 Okvpn/ -p...";
php vendor/bin/phpcs --encoding=utf-8 --standard=psr2 -p src
;;
esac
;;
esac
| vtsykun/redis-message-queue | .builds/travis.sh | Shell | mit | 845 |
#!/bin/bash
set -x
set -e
source "/tools/common_rc/functions.sh"
apt-get update
# install SSHD
apt-get install -y --no-install-recommends ssh
# allow root and password logins; disable PAM and reverse-DNS lookups
# (-E is needed so the '#?' patterns match optionally-commented directives)
sed -i -E \
	-e 's/^#?UsePAM.*$/UsePAM no/g' \
	-e 's/^#?PasswordAuthentication.*$/PasswordAuthentication yes/g' \
	-e 's/^#?PermitRootLogin.*$/PermitRootLogin yes/g' \
	-e 's/^#?UseDNS.*$/UseDNS no/g' \
	-e 's/AcceptEnv LANG.*//' \
	/etc/ssh/sshd_config
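# sanity check (illustrative, not part of the original flow):
# grep -E '^(UsePAM|PasswordAuthentication|PermitRootLogin|UseDNS)' /etc/ssh/sshd_config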
mkdir -p /var/run/sshd
echo 'root:root' | chpasswd
apt-get install -y --no-install-recommends openssh-client git git-flow
echo 'Setup nice PS1 to use with git...' \
&& wget -q "https://gist.githubusercontent.com/dariuskt/0e0b714a4cf6387d7178/raw/83065e2fead22bb1c2ddf809be05548411eabea7/git_bash_prompt.sh" -O /home/project/.git_bash_prompt.sh \
&& echo '. ~/.git_bash_prompt.sh' >> /home/project/.bashrc \
&& chown project:project /home/project/.git_bash_prompt.sh \
&& echo -e '\n\nDONE\n'
# make directories not that dark on dark background
echo 'DIR 30;47' > /home/project/.dircolors
chown project:project /home/project/.dircolors
echo 'adding git aliases...' \
&& echo alias gl=\"git log --pretty=format:\'%C\(bold blue\)%h %Creset-%C\(bold yellow\)%d %C\(red\)%an %C\(green\)%s\' --graph --date=short --decorate --color --all\" >> /home/project/.bash_aliases \
&& echo 'alias pull-all='\''CB=$(git branch | grep ^\* | cut -d" " -f2); git branch | grep -o [a-z].*$ | xargs -n1 -I{} bash -c "git checkout {} && git pull"; git checkout "$CB"'\' >> /home/project/.bash_aliases \
&& chown project:project /home/project/.bash_aliases \
&& echo -e '\n\nDONE\n'
echo 'enable bash_custom inside dev container'
echo 'if [ -f ~/.bash_custom ]; then . ~/.bash_custom ; fi' >> /home/project/.bashrc
# install the mariadb (mysql-compatible) client
apt-get install -y --no-install-recommends mariadb-client
# install composer
phpEnableModule json
phpEnableModule phar
phpEnableModule zip
phpEnableModule iconv
curl -sSL 'https://getcomposer.org/download/1.10.22/composer.phar' > /usr/local/bin/composer.phar
chmod a+x /usr/local/bin/composer.phar
ln -s /usr/local/bin/composer.phar /usr/local/bin/composer
composer self-update --1
# install hirak/prestissimo
phpEnableModule curl
sudo -u project composer --no-interaction global require "hirak/prestissimo:^0.3"
# disable enabled modules
phpDisableModule curl
phpDisableModule json
phpDisableModule phar
phpDisableModule zip
# install phpunit
apt-get install -y --no-install-recommends phpunit65
phpenmod phar
cp -frv /build/files/* /
# Clean up APT when done.
source /usr/local/build_scripts/cleanup_apt.sh
| nfq-technologies/docker-images | php70-dev/build/setup_docker.sh | Shell | mit | 2,588 |
#!/bin/sh
# Capture the current version.
VERSION=`cat ./VERSION`
# Currently using YUI Compressor for minification.
YUICOMPRESSOR=./3pty/yui/yuicompressor/yuicompressor-2.4.7.jar
# To use the YUI Compressor, Java is required (http://java.com).
JAVA=`which java`
if [ "$JAVA" == "" ]; then echo "Not found: java" ; exit 1 ; fi
# Output tmp version file (use comment style that works for both CSS and JS).
echo "/* uducada v$VERSION - https://github.com/m5n/uducada */" > ./version.tmp
# Process CSS files for each third-party UI framework.
UIFWKFILES=`find ./src/css/adapters/uifwk -type f`
for UIFWKFILE in $UIFWKFILES
do
# Extract framework identifier.
# Note: remove "-" for readability, e.g. jquery-ui => jqueryui.
UIFWKID=`expr "$UIFWKFILE" : ".*/\(.*\).css" | tr -d "-"`
echo "Generating uducada-$UIFWKID CSS files..."
# Generate unminified and minified versions of the CSS file.
# Note: add adapter file before uducada files.
cat $UIFWKFILE ./src/css/*.css > ./uducada.css.tmp
$JAVA -jar $YUICOMPRESSOR --type css -o ./uducada.min.css.tmp ./uducada.css.tmp
# Add the version file to the minified and unminified versions of the CSS file.
cat ./version.tmp ./uducada.css.tmp > ./uducada-$UIFWKID.css
cat ./version.tmp ./uducada.min.css.tmp > ./uducada-$UIFWKID.min.css
done
# Process JS files for each third-party JS and UI framework combination.
FWKFILES=`find ./src/js/adapters/fwk -type f`
for FWKFILE in $FWKFILES
do
# Extract framework identifier.
# Note: remove "-" for readability, e.g. jquery-ui => jqueryui.
FWKID=`expr "$FWKFILE" : ".*/\(.*\).js" | tr -d "-"`
UIFWKFILES=`find ./src/js/adapters/uifwk -type f`
for UIFWKFILE in $UIFWKFILES
do
# Extract framework identifier.
# Note: remove "-" for readability, e.g. jquery-ui => jqueryui.
UIFWKID=`expr "$UIFWKFILE" : ".*/\(.*\).js" | tr -d "-"`
echo "Generating uducada-$FWKID-$UIFWKID JS files..."
# Generate unminified and minified versions of the JS file.
# Note: add adapter files before uducada files.
cat $FWKFILE $UIFWKFILE ./src/js/*.js > ./uducada.js.tmp
$JAVA -jar $YUICOMPRESSOR --type js -o ./uducada.min.js.tmp ./uducada.js.tmp
        # Add the version file to the minified and unminified versions of the JS file.
cat ./version.tmp ./uducada.js.tmp > ./uducada-$FWKID-$UIFWKID.js
cat ./version.tmp ./uducada.min.js.tmp > ./uducada-$FWKID-$UIFWKID.min.js
done
done
# Delete all tmp files.
rm ./*.tmp
| m5n/uducada | build.sh | Shell | mit | 2,544 |
#!/usr/bin/env bash
set -e
function hr {
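  # draw a rule across the full terminal width (COLUMNS, or tput cols as fallback)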
printf '%*s\n' "${COLUMNS:-$(tput cols)}" '' | tr ' ' -
}
while getopts ":u" opt; do
case $opt in
u)
docker pull blengerich/genamap || { echo "failed to pull the image" >&2; exit 1; }
hr
echo 'Pulled the GenAMap docker image'
hr
;;
\?)
echo "Invalid option: -$OPTARG" >&2
exit 1
;;
esac
done
if [ ! -d ./mongodbpath ]; then
mkdir mongodbpath
fi
if [ ! -d ./postgresdbpath ]; then
mkdir postgresdbpath
fi
# Run MongoDB Container
m_name="genamap_mongo"
if ! docker ps --format "{{.Names}}"| grep -q ${m_name}; then
if ! docker ps -a --format "{{.Names}}"| grep -q ${m_name}; then
docker run -v "$(pwd)/mongodbpath":/data -p 27017:27017 --name ${m_name} -d mongo mongod --smallfiles \
|| { echo 'starting mongo failed' >&2; exit 1; }
else
docker start ${m_name} || { echo "starting mongo failed" >&2; exit 1; }
fi
hr
echo "MongoDB container has been successfully launched!"
hr
else
hr
echo "MongoDB container is already running..."
hr
fi
# Run PostgreSQL container
p_name="genamap_postgres"
if ! docker ps --format "{{.Names}}"| grep -q ${p_name}; then
if ! docker ps -a --format "{{.Names}}"| grep -q ${p_name}; then
docker run --name ${p_name} -p 5432:5432 -v "$(pwd)/postgresdbpath":/var/lib/postgresql/data \
-e POSTGRES_PASSWORD='!!GeNaMaPnew00' -e POSTGRES_USER='postgres' -d postgres \
|| { echo "starting postgres failed" >&2; exit 1; }
else
docker start ${p_name} || { echo "starting postgres failed" >&2; exit 1; }
fi
hr
echo "PostgreSQL container has been successfully launched!"
hr
else
hr
echo "PostgreSQL container is already running..."
hr
fi
# Enter the GenAMap container
g_name="genamap_development_server"
hr
echo "Entering the GenAMap development server container..."
hr
echo $1
echo $2
#docker run -ti -p 3000:3000 -p 3001:3001 --name ${g_name} --link ${m_name}:mongo --link ${p_name}:postgres \
# -w /usr/src/genamap \-v ${PWD}/../src/:/usr/src/genamap -v $1:/usr/src/genamap2 -v $2:/usr/src/genamap3 blengerich/genamap
if ! docker ps --format "{{.Names}}" | grep -q ${g_name}; then
if docker ps -a --format "{{.Names}}"| grep -q ${g_name}; then
docker start ${g_name} \
|| { echo "starting genamap failed" >&2; exit 1; }
docker exec -it ${g_name} bash \
|| { echo "starting genamap failed" >&2; exit 1; }
else
docker run -ti -p 3000:3000 -p 3001:3001 --name ${g_name} --link ${m_name}:mongo --link ${p_name}:postgres \
-w /usr/src/genamap \-v ${PWD}/../src/:/usr/src/genamap -v $1:/usr/src/genamap_data -v $2:/usr/src/genamap_config blengerich/genamap \
|| { echo "starting genamap failed" >&2; exit 1; }
fi
else
docker exec -it ${g_name} bash
fi
| blengerich/GenAMap | scripts/dev_genamap.sh | Shell | mit | 2,991 |
#!/bin/bash
#SBATCH --partition=mono
#SBATCH --ntasks=1
#SBATCH --time=4-0:00
#SBATCH --mem-per-cpu=8000
#SBATCH -J Deep-RBM_DBM_4_inc_bin_PARAL_base
#SBATCH -e Deep-RBM_DBM_4_inc_bin_PARAL_base.err.txt
#SBATCH -o Deep-RBM_DBM_4_inc_bin_PARAL_base.out.txt
source /etc/profile.modules
module load gcc
module load matlab
cd ~/deepLearn && srun ./deepFunction 4 'RBM' 'DBM' '128 1000 1500 10' '0 1 1 1' '4_inc_bin' 'PARAL_base' "'iteration.n_epochs', 'learning.lrate', 'learning.cd_k', 'learning.persistent_cd', 'parallel_tempering.use'" '200 1e-3 1 0 1' "'iteration.n_epochs', 'learning.persistent_cd'" '200 1' | aciditeam/matlab-ts | jobs/deepJobs_RBM_DBM_4_inc_bin_PARAL_base.sh | Shell | mit | 619 |
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/c_negador2.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=c_negador2.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=cnegador2.x/
# Functions
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/cnegador2.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/cnegador2.x.tar
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/cnegador2.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
| tocache/picomones | UPC Microcontroladores 2018-1/Ejercicios en XC8/c_negador2.X/nbproject/Package-default.bash | Shell | cc0-1.0 | 1,383 |
java -Djava.ext.dirs=reggie-libs -Djava.util.logging.config.file=config/logging.properties -jar reggie-libs/start.jar config/start-reggie.config
| paawak/blog | code/jini/unsecure/jini-services/start-reggie.sh | Shell | gpl-2.0 | 145 |
#!/bin/sh -e
# Glen Pitt-Pladdy (ISC)
. ./functions.sh
depends_SLES() {
echo >/dev/null
}
depends_RHEL() {
[ -x /usr/bin/expect ] || yum install -y expect
}
trakapacheconf_SLES() {
CONFDIR=/etc/apache2/conf.d
CONF=$CONFDIR/t2016-$TRAKNS.conf
echo $CONF
}
trakapacheconf_RHEL() {
CONFDIR=/etc/httpd/conf.d
CONF=$CONFDIR/t2016-$TRAKNS.conf
echo $CONF
}
apacherestart_SLES() {
[ -x /usr/sbin/httpd2 ] && service apache2 restart
return 0
}
apacherestart_RHEL() {
[ -x /usr/sbin/httpd ] && service httpd restart
return 0
}
echo "########################################"
INST=`instname $SITE $ENV DB$VER`
TRAKNS=`traknamespace $SITE $ENV`
TRAKPATH=`trakpath $SITE $ENV DB$VER`
echo "Vanilla Trak $VER Install for $SITE : $ENV ($INST: $TRAKNS)"
# check if we need to do this
if [ -f ${TRAKPATH}/web/default.htm -a -f ${TRAKPATH}/db/data/CACHE.DAT ]; then
echo "Already appears to be web and databases installed"
exit 0
fi
# get cache password if needed
if [ -z "$CACHEPASS" ]; then
getpass "Caché Password" CACHEPASS 1
fi
# get Trak zip password if needed
if [ -z "$TRAKZIPPASS" ]; then
getpass "TrakCare .zip Password" TRAKZIPPASS 1
fi
# find installer
#installer=`locatefilestd $VER_*_R*_B*.zip`
installer=/trak/iscbuild/installers/T2015_20150331_1957_ENXX_R0_FULL_B10.zip
#installer=/trak/iscbuild/installers/2014_20140902_1034_R4ENXX_B32.zip
#installer=/trak/iscbuild/installers/T2015_20150527_1736_DEV_ENXX_FULL_B231.zip
echo $installer
# check for target web/ directory
if [ ! -d ${TRAKPATH}/web ]; then
echo "FATAL - expecting \"${TRAKPATH}/web/\" to be created with appropriate permissions in advance" >&2
exit 1
fi
# install dependancies
osspecific depends
# check that expect is available
if [ ! -x /usr/bin/expect ]; then
echo "FATAL - can't find executable /usr/bin/expect" >&2
exit 1
fi
# check if it's already installed
if [ -f ${TRAKPATH}/web/default.htm ]; then
echo "Install (web/default.htm) already exists - skipping"
exit 0
fi
# check we are root
if [ `whoami` != 'root' ]; then
echo "Being run as user `whoami` - should be run as root"
exit 1
fi
# extract and run the Trak installer
mkdir $TMPDIR/trakextract
cp expect/TrakVanillaT2015_Install_install.expect $TMPDIR/trakextract
chmod 755 $TMPDIR/trakextract/TrakVanillaT2015_Install_install.expect
olddir=`pwd`
cd $TMPDIR/trakextract
${olddir}/expect/TrakVanillaT2014_Install_unzip.expect $installer
chown $CACHEUSR.$CACHEGRP $TMPDIR/trakextract -R
$TMPDIR/trakextract/TrakVanillaT2015_Install_install.expect $INST $TMPDIR/trakextract $ENV $TRAKNS ${TRAKPATH} /trakcare
cd ${olddir}
rm -r $TMPDIR/trakextract
# fix up database naming to UK convention
ccontrol stop $INST nouser
UCSITE=`echo $SITE | tr '[:lower:]' '[:upper:]'`
sed -i "s/^$TRAKNS=$ENV-DATA,$ENV-APPSYS/$TRAKNS=$TRAKNS-DATA,$TRAKNS-APPSYS/" ${TRAKPATH}/hs/cache.cpf
sed -i "s/^$ENV-/$TRAKNS-/" ${TRAKPATH}/hs/cache.cpf
sed -i "s/\(Global_.*\|Routine_.*\|Package_.*\)=$ENV-/\1=$TRAKNS-/" ${TRAKPATH}/hs/cache.cpf
./expect/TrakVanillaT2014_Install_start.expect $INST
# change web/ directory to use site code (and possibly create lc symlink)
cd ${TRAKPATH}/web/custom/
mv $TRAKNS/ $SITE_UC
#ln -s $SITE_UC $SITE_LC
cd ${olddir}
# change config in Configuration Manager
./expect/TrakVanillaT2014_Install_cleanup.expect $INST $TRAKNS $SITE_UC ${TRAKPATH}/web/custom/$SITE_UC/cdl
# fix web/ permissions
chown $CACHEUSR.$CACHEGRP ${TRAKPATH}/web -R
find ${TRAKPATH}/web -type d -exec chmod 2770 {} \;
find ${TRAKPATH}/web -type f -exec chmod 660 {} \;
## install the apache config
#osspecific trakapacheconf
##apacheconf=`osspecific trakapacheconf`
#if [ -d $CONFDIR -a -f /opt/cspgateway/bin/CSP.ini ]; then
# apacheconf=$CONF
# cp conffiles/apache-t2016.conf $apacheconf
# chmod 644 $apacheconf
# # apply custom settings
# sed -i 's/TRAKWEBAPP/\/trakcare/g' $apacheconf
# sed -i "s/TRAKWEBDIR/`path2regexp ${TRAKPATH}/web`/g" $apacheconf
# # add in CSP config
# ini_update.pl /opt/cspgateway/bin/CSP.ini \
# '[APP_PATH:/trakcare]GZIP_Compression=Enabled' \
# '[APP_PATH:/trakcare]GZIP_Exclude_File_Types=jpeg gif ico png' \
# '[APP_PATH:/trakcare]Response_Size_Notification=Chunked Transfer Encoding and Content Length' \
# '[APP_PATH:/trakcare]KeepAlive=No Action' \
# '[APP_PATH:/trakcare]Non_Parsed_Headers=Enabled' \
# '[APP_PATH:/trakcare]Alternative_Servers=Disabled' \
# "[APP_PATH:/trakcare]Alternative_Server_0=1~~~~~~$INST" \
# "[APP_PATH:/trakcare]Default_Server=$INST" \
# '[APP_PATH_INDEX]/trakcare=Enabled'
#else
# echo "Skipping Trak Config (no Apache and/or CSP)"
#fi
#osspecific apacherestart
| casep/isc_coding | trakautomation/do_TrakVanillaT2015_Install.sh | Shell | gpl-2.0 | 4,564 |
#!/bin/bash
#
###########################################################
# copie_clepub_veyon.sh
# Run this script on the veyon client machines.
# It retrieves the "teacher" public key from the veyon "master" host.
# 20180327
##########################################################
DATE1=$(date +%F+%0kh%0M)
# Colors
ROUGE="\\033[1;31m"
VERT="\\033[1;32m"
BLEU="\\033[1;34m"
JAUNE="\\033[1;33m"
COLTITRE="\033[1;35m" # Pink
COLDEFAUT="\033[0;33m" # Brownish yellow
COLCMD="\033[1;37m" # White
COLERREUR="\033[1;31m" # Red
COLTXT="\033[0;37m" # Grey
COLINFO="\033[0;36m" # Cyan
COLPARTIE="\033[1;34m" # Blue
cle=key
repcle=/etc/veyon/keys/public/teacher
echo -e "$JAUNE"
echo "###########################################################################"
echo ""
echo "Voulez-vous configurer veyon (client) sur ce poste ?"
echo "Il faudra indiquer l'adresse ip du poste maitre veyon et son mdp root."
echo ""
echo "###########################################################################"
echo -e "$COLTXT"
echo -e "$COLINFO"
read -p "Que voulez-vous faire ?
1 (je veux configurer veyon sur ce client)
2 (non, je veux sortir !) : " rep
echo -e "$COLTXT"
case $rep in
1 )
echo -e "$JAUNE"
echo "Entrez l'adresse ip du poste veyon-master : "
echo -e "$COLTXT"
read IPMAST
cd /etc
rm -rf veyon
cd
mkdir -p $repcle
#mv $repcle/$cle $repcle/cle_$DATE1
cd $repcle
scp root@$IPMAST:$repcle/$cle . || ERREUR="1"
if [ "$ERREUR" = "1" ];then
echo -e "$COLERREUR"
echo "Erreur lors de la copie de la clé..."
echo "Vérifier l'adresse ip du poste veyon-master puis relancez le script."
echo -e "$COLTXT"
else
# Adjust permissions and report
cd /etc
chmod -R 777 veyon/
echo -e "$VERT"
echo "La clé a été copiée depuis le poste veyon-master $IPMAST"
echo -e "$COLTXT"
# Disable veyon-master and veyon-configurator on the student machine
chmod -x /usr/bin/veyon-master
chmod -x /usr/bin/veyon-configurator
cd /usr/share/applications
mv veyon-master.desktop veyon-master.desktop.bak
mv veyon-configurator.desktop veyon-configurator.desktop.bak
cd
echo -e "$COLDEFAUT"
echo "Info : veyon-master et veyon-configurator ont été désactivés sur ce poste."
echo -e "$COLTXT"
fi
;;
* )
echo -e "$COLINFO"
echo "Pas de copie demandée."
echo -e "$VERT"
echo "###########################################################################"
echo ""
echo "Vous pourrez copier la clé publique veyon plus tard en lançant le script"
echo "/mnt/netlogon/alancer/copie_clepub_veyon.sh"
echo ""
echo "###########################################################################"
echo -e "$COLINFO"
echo "A bientôt !"
echo -e "$COLTXT"
exit 0
;;
esac
echo "Terminé !"
exit 0
| jcmousse/clinux | se3/alancer/copie_clepub_veyon.sh | Shell | gpl-2.0 | 3,308 |
# export AWS_ACCESS_KEY="Your-Access-Key"
# export AWS_SECRET_KEY="Your-Secret-Key"
today=`date +"%d-%m-%Y","%T"`
logfile="/awslog/automation-instances.log"
# Grab all Instance IDs for REBOOT action and export the IDs to a text file
sudo aws ec2 describe-instances --filters Name=tag:reboot-time,Values=18-00 Name=tag:bash-profile,Values=wd --query Reservations[*].Instances[*].[InstanceId] --output text > ~/tmp/reboot_wd_instance_info.txt 2>&1
# Loop over the instances to be rebooted
for instance_id in $(cat ~/tmp/reboot_wd_instance_info.txt)
do
# Reboot instances
rebootresult=$(sudo aws ec2 reboot-instances --instance-ids $instance_id)
# Put info into log file
	echo "Attempt to reboot $instance_id at $today by AWS CLI with result: $rebootresult (empty for no result)" >> $logfile
done
| STARTSPACE/aws-ec2-start-stop-reboot-by-timetable | reboot/wd/reboot-wd-18.sh | Shell | gpl-2.0 | 785 |
#! /bin/sh
# Copyright (C) 2003-2018 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Make sure empty calls to AC_CONFIG_FILES or AC_CONFIG_HEADERS are diagnosed.
. test-init.sh
# We avoid using configure.ac stub initialized by our testsuite setup, as
# we need to keep track of line numbers (to grep for error messages).
cat > configure.ac << END
AC_INIT([$me], [1.0])
AM_INIT_AUTOMAKE
AC_CONFIG_FILES ([oops])
AC_CONFIG_HEADERS
AC_OUTPUT
END
$ACLOCAL
AUTOMAKE_fails
grep 'configure\.ac:3:.* arguments .*AC_CONFIG_FILES' stderr
grep 'configure\.ac:4:.* arguments .*AC_CONFIG_HEADERS' stderr
:
| komh/automake-os2 | t/conff2.sh | Shell | gpl-2.0 | 1,205 |
#!/bin/bash
for jobdir in $(ls -l|grep ^d|awk '{print $9}');do
# Determine job type
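	# Directory names are assumed to follow <a>-<b>-<c>-<jobtype>, e.g. a
	# hypothetical "2017-01-case1-absperm", so field 4 selects the job type.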
jobtype=$(echo $jobdir|cut -d '-' -f4)
case $jobtype in
absperm)
sub="qsub -lwalltime=08:00:00 -lnodes=1:ppn=8 -N $jobdir -M [email protected] /home/steel/runscript-absperm.sh"
;;
rwabsperm)
sub="qsub -lwalltime=08:00:00 -lnodes=1:ppn=8 -N $jobdir -M [email protected] /home/steel/rwabsperm.sh"
;;
rwff)
sub="qsub -lwalltime=08:00:00 -lnodes=1:ppn=8 -N $jobdir -M [email protected] /home/steel/rwformationfactor.sh"
;;
esac
# Submit the job if jobdir has no .out files
if (( $(ls -l $jobdir|grep -c .out) == 0 ));then
cd $jobdir
$sub
cd ..
fi
done
| simontakite/sysadmin | cluster/stallo/submitall.sh | Shell | gpl-2.0 | 759 |
#!/bin/bash
for f in sml/*.sml; do { time ../bin/yasmin $f ; } 2>&1 | tee logs/$f.txt ; done
exit
| mwcoomber/Yasmin | examples/run_tests.sh | Shell | gpl-2.0 | 100 |
#!/bin/sh
# Copyright (C) 1999-2005 ImageMagick Studio LLC
#
# This program is covered by multiple licenses, which are described in
# LICENSE. You should have received a copy of LICENSE with this
# package; otherwise see http://www.imagemagick.org/script/license.php.
. ${srcdir}/tests/common.shi
${RUNENV} ${MEMCHECK} ./rwfile ${SRCDIR}/input_truecolor16.dpx PPM
| atmark-techno/atmark-dist | user/imagemagick/tests/rwfile_PPM_truecolor16.sh | Shell | gpl-2.0 | 365 |
#!/bin/sh
# Copyright (C) 2012 Red Hat, Inc. All rights reserved.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
. lib/inittest
test -e LOCAL_LVMETAD || skip
aux prepare_devs 2
pvcreate --metadatatype 1 "$dev1"
should vgscan --cache
pvs | should grep "$dev1"
vgcreate --metadatatype 1 $vg1 "$dev1"
should vgscan --cache
vgs | should grep $vg1
pvs | should grep "$dev1"
# check for RHBZ 1080189 -- SEGV in lvremove/vgremove
pvcreate -ff -y --metadatatype 1 "$dev1" "$dev2"
vgcreate --metadatatype 1 $vg1 "$dev1" "$dev2"
lvcreate -l1 $vg1
pvremove -ff -y "$dev2"
vgchange -an $vg1
not lvremove $vg1
not vgremove -ff -y $vg1
| twitter/bittern | lvm2/test/shell/lvmetad-lvm1.sh | Shell | gpl-2.0 | 972 |
#!/bin/bash
if [ -z $1 ]
then
exit 0
fi
ip=$1
rules=$2
trunk=$3
access=$4
general_rules='/etc/isida/isida.conf'
get_uplink=`grep 'uplink' $general_rules | cut -d= -f2 | sed -e s/%ip/$ip/`
ism_vlanid=`grep 'ism_vlanid' $general_rules | cut -d= -f2`
port_count=`echo $5 | sed -e s/://`
args=''
count=0
enum_pars=`cat $rules | grep -v '#' | grep '\.x\.' | cut -d. -f1 | uniq`
raw_fix='/tmp/'`date +%s%N`'-fix'
not_access=`/usr/local/sbin/invert_string_interval.sh $access $port_count`
not_trunk=`/usr/local/sbin/invert_string_interval.sh $trunk $port_count`
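# invert_string_interval.sh is assumed to return the complement of a port
# interval, e.g. (hypothetical) "1-24" on a 28-port switch -> "25-28"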
# Traffic control
traf_control_thold=`grep traffic_control_bcast_threshold $rules | cut -d= -f2`
traffic_control_trap="config traffic control_trap both"
access_ports="`/usr/local/sbin/interval_to_string.sh $access`"
not_access_ports="`/usr/local/sbin/interval_to_string.sh $not_access`"
traffic_control_string=""
for i in $access_ports
do
traffic_control_string=$traffic_control_string"\nconfig traffic control $i broadcast enable multicast enable unicast disable action drop broadcast_threshold $traf_control_thold multicast_threshold 128 unicast_threshold 131072 countdown 0 time_interval 5"
done
for i in $not_access_ports
do
traffic_control_string=$traffic_control_string"\nconfig traffic control $i broadcast disable multicast disable unicast disable action drop broadcast_threshold $traf_control_thold multicast_threshold 128 unicast_threshold 131072 countdown 0 time_interval 5"
done
# LBD
if [ "`grep lbd_state $rules | cut -d= -f2`" = "enable" ]
then
lbd_state="enable loopdetect"
else
lbd_state="disable loopdetect"
fi
lbd_trap="config loopdetect trap `grep lbd_trap $rules | cut -d= -f2`"
lbd_on=""
for i in $access_ports
do
lbd_on=$lbd_on"\nconfig loopdetect ports $i state enable"
done
lbd_off=""
for i in $not_access_ports
do
lbd_off=$lbd_off"\nconfig loopdetect ports $i state disable"
done
#lbd_off="config loopdetect ports $not_access state disabled"
# Safeguard
sg_state=`grep safeguard_state $rules | cut -d= -f2`
sg_rise=`grep safeguard_rising $rules | cut -d= -f2`
sg_fall=`grep safeguard_falling $rules | cut -d= -f2`
if [ "`grep safeguard_trap $rules | cut -d= -f2`" = "yes" ]
then
sg_trap="enable"
else
sg_trap="disable"
fi
safeguard_string="config safeguard_engine state $sg_state utilization rising $sg_rise falling $sg_fall trap_log $sg_trap mode fuzzy"
# Other
snmp_traps="enable snmp traps\nenable snmp authenticate traps"
dhcp_local_relay="disable dhcp_local_relay"
dhcp_snooping="disable address_binding dhcp_snoop"
impb_acl_mode="disable address_binding acl_mode"
dhcp_screening="config filter dhcp_server ports all state disable\nconfig filter dhcp_server ports $access state enable"
netbios_filter="config filter netbios all state disable\nconfig filter netbios $access state enable"
impb_trap="enable address_binding trap_log"
cpu_interface_filtering="enable cpu_interface_filtering"
arp_aging_time="config arp_aging time `grep arp_aging_time $rules | cut -d= -f2`"
igmp_snooping="enable igmp_snooping"
link_trap="enable snmp linkchange_traps\nconfig snmp linkchange_traps ports 1-28 enable"
# SNTP
sntp_addr1=`grep sntp_primary $rules | cut -d= -f2 | awk -F:: '{print $1}'`
sntp_addr2=`grep sntp_primary $rules | cut -d= -f2 | awk -F:: '{print $2}'`
sntp_string="enable sntp\nconfig sntp primary $sntp_addr1 secondary $sntp_addr2 poll-interval 720\nconfig sntp primary $sntp_addr2 secondary $sntp_addr1"
# IGMP acc auth
igmp_acc_auth_enabled="config igmp access_authentication ports $access state enable"
igmp_acc_auth_disabled="config igmp access_authentication ports $not_access state disable"
# Limited mcast
range1="config limited_multicast_addr ports $access add profile_id 1\nconfig limited_multicast_addr ports $not_access delete profile_id 1"
range2="config limited_multicast_addr ports $access add profile_id 2\nconfig limited_multicast_addr ports $not_access delete profile_id 2"
range3="config limited_multicast_addr ports $access add profile_id 3\nconfig limited_multicast_addr ports $not_access delete profile_id 3"
range4="config limited_multicast_addr ports $access add profile_id 4\nconfig limited_multicast_addr ports $not_access delete profile_id 4"
range5="config limited_multicast_addr ports $access add profile_id 5\nconfig limited_multicast_addr ports $not_access delete profile_id 5"
limited_access="config limited_multicast_addr ports $access access permit"
limited_deny="config limited_multicast_addr ports $trunk access deny"
# SYSLOG
syslog_ip=`grep 'syslog_host.x.ip' $rules | cut -d= -f2`
#syslog_severity=`grep 'syslog_host.x.severity' $rules | cut -d= -f2`
syslog_facility=`grep 'syslog_host.x.facility' $rules | cut -d= -f2`
syslog_state=`grep 'syslog_host.x.state' $rules | cut -d= -f2`
syslog_del="delete syslog host 2"
syslog_add="create syslog host 2 ipaddress $syslog_ip severity debug facility $syslog_facility state $syslog_state"
syslog_enabled="enable_syslog"
# SNMP
snmp_ip=`grep 'snmp_host.x.ip' $rules | cut -d= -f2`
snmp_community=`grep 'snmp_host.x.community' $rules | cut -d= -f2`
snmp_del="delete snmp host $snmp_ip\ndelete snmp host 192.168.1.120"
snmp_add="create snmp host $snmp_ip v2c $snmp_community"
# RADIUS
radius_ip=`grep 'radius.x.ip' $rules | cut -d= -f2`
radius_key=`grep 'radius.x.key' $rules | cut -d= -f2`
radius_auth=`grep 'radius.x.auth' $rules | cut -d= -f2`
radius_acct=`grep 'radius.x.acct' $rules | cut -d= -f2`
radius_retransmit=`grep 'radius_retransmit' $rules | cut -d= -f2`
radius_timeout=`grep 'radius_timeout' $rules | cut -d= -f2`
radius_del="config radius delete 1"
radius_add="config radius add 1 $radius_ip key $radius_key auth_port $radius_auth acct_port $radius_acct"
radius_params="config radius 1 timeout $radius_timeout retransmit $radius_retransmit"
for i in $@
do
case $i in
"traffic_control_trap") echo -e "$traffic_control_string" >> $raw_fix;;
"traffic_control_bcast") echo -e "$traffic_control_string" >> $raw_fix;;
"traffic_control_mcast") echo -e "$traffic_control_string" >> $raw_fix;;
"traffic_control_bcast_threshold") echo -e "$traffic_control_string" >> $raw_fix;;
"traffic_control_mcast_threshold") echo -e "$traffic_control_string" >> $raw_fix;;
"lbd_state") echo -e "$lbd_state" >> $raw_fix;;
"lbd_on") echo -e "$lbd_on" >> $raw_fix;;
"lbd_off") echo -e "$lbd_off" >> $raw_fix;;
"lbd_trap") echo -e "$lbd_trap" >> $raw_fix;;
"safeguard_state") echo -e "$safeguard_string" >> $raw_fix;;
"safeguard_trap") echo -e "$safeguard_string" >> $raw_fix;;
"safeguard_rising") echo -e "$safeguard_string" >> $raw_fix;;
"safeguard_falling") echo -e "$safeguard_string" >> $raw_fix;;
"snmp_traps") echo -e "$snmp_traps" >> $raw_fix;;
"dhcp_local_relay") echo -e "$dhcp_local_relay" >> $raw_fix;;
"dhcp_snooping") echo -e "$dhcp_snooping" >> $raw_fix;;
"impb_acl_mode") echo -e "$impb_acl_mode" >> $raw_fix;;
"dhcp_screening") echo -e "$dhcp_screening" >> $raw_fix;;
"netbios_filter") echo -e "$netbios_filter" >> $raw_fix;;
"impb_trap") echo -e "$impb_trap" >> $raw_fix;;
"cpu_interface_filtering") echo -e "$cpu_interface_filtering" >> $raw_fixing;;
"arp_aging_time") echo -e "$arp_aging_time" >> $raw_fix;;
"sntp_state") echo -e "$sntp_string" >> $raw_fix;;
"sntp_primary") echo -e "$sntp_string" >> $raw_fix;;
"sntp_secondary") echo -e "$sntp_string" >> $raw_fix;;
"link_trap") echo -e "$link_trap" >> $raw_fix;;
"mcast_range.iptv1") echo -e "$range1\n$limited_access\n$limited_deny" >> $raw_fix;;
"mcast_range.iptv2") echo -e "$range2\n$limited_access\n$limited_deny" >> $raw_fix;;
"mcast_range.iptv3") echo -e "$range3\n$limited_access\n$limited_deny" >> $raw_fix;;
"mcast_range.iptv4") echo -e "$range4\n$limited_access\n$limited_deny" >> $raw_fix;;
"mcast_range.iptv5") echo -e "$range5\n$limited_access\n$limited_deny" >> $raw_fix;;
"igmp_acc_auth_enabled") echo -e "$igmp_acc_auth_enabled" >> $raw_fix;;
"igmp_acc_auth_disabled") echo -e "$igmp_acc_auth_disabled" >> $raw_fix;;
"syslog_host") echo -e "$syslog_del\n$syslog_add" >> $raw_fix;;
"snmp_host") echo -e "$snmp_del\n$snmp_add" >> $raw_fix;;
"radius") echo -e "$radius_del\n$radius_add\n$radius_params" >> $raw_fix;;
"radius_retransmit") echo -e "$radius_params" >> $raw_fix;;
"radius_timeout") echo -e "$radius_params" >> $raw_fix;;
"igmp_snooping") echo -e "$igmp_snooping" >> $raw_fix;;
"syslog_enabled") echo -e "$syslog_enabled" >> $raw_fix;;
"ism") if [ `/usr/local/sbin/ping_equip.sh $ip` -eq 1 ]
then
ism_prefix='.1.3.6.1.4.1.171.12.64.3.1.1'
ism_name=`snmpget -v2c -c dlread -Ovq $ip $ism_prefix.2.$ism_vlanid | sed -e s/\"//g`
uplink=`$get_uplink`
raw_tagmember=`snmpget -v2c -c dlread -Ovq $ip $ism_prefix.5.$ism_vlanid | sed -e s/\"//g | awk '{print $1 $2 $3 $4}' | xargs -l /usr/local/sbin/portconv.sh`
raw_member=`snmpget -v2c -c dlread -Ovq $ip $ism_prefix.4.$ism_vlanid | sed -e s/\"//g | awk '{print $1 $2 $3 $4}' | xargs -l /usr/local/sbin/portconv.sh`
raw_source=`snmpget -v2c -c dlread -Ovq $ip $ism_prefix.3.$ism_vlanid | sed -e s/\"//g | awk '{print $1 $2 $3 $4}' | xargs -l /usr/local/sbin/portconv.sh`
tagmember=`/usr/local/sbin/string_to_bitmask.sh $raw_tagmember | xargs -l /usr/local/sbin/bitmask_to_interval.sh`
source=`/usr/local/sbin/string_to_bitmask.sh $raw_source | xargs -l /usr/local/sbin/bitmask_to_interval.sh`
echo -e "config igmp_snooping multicast_vlan $ism_name del tag $tagmember" >> $raw_fix
echo -e "config igmp_snooping multicast_vlan $ism_name del source $source" >> $raw_fix
detailed_trunk=`/usr/local/sbin/interval_to_string.sh $trunk`
del_member_raw=''
for i in $detailed_trunk
do
if [ "`echo $raw_member | grep $i`" ]
then
del_member_raw=$del_member_raw" $i"
fi
done
if [ -n "$del_member_raw" ]
then
del_member=`/usr/local/sbin/string_to_bitmask.sh $del_member_raw | xargs -l /usr/local/sbin/bitmask_to_interval.sh`
echo -e "config igmp_snooping multicast_vlan $ism_name del member $del_member" >> $raw_fix
fi
new_source=$uplink
new_tagmember=`echo $detailed_trunk | sed -e s/$uplink// | xargs -l /usr/local/sbin/string_to_bitmask.sh | xargs -l /usr/local/sbin/bitmask_to_interval.sh`
echo -e "config igmp_snooping multicast_vlan $ism_name add source $new_source" >> $raw_fix
echo -e "config igmp_snooping multicast_vlan $ism_name add tag $new_tagmember" >> $raw_fix
fi;;
esac
done
fix_cmd='/tmp/'`date +%s%N`'_fix'
if [ -s $raw_fix ]
then
echo "save" >> $raw_fix
fi
cat $raw_fix | uniq
rm -f $rules $raw_fix
| vlad-syan/isida | usr/local/sbin/fix_3528.sh | Shell | gpl-2.0 | 11,905 |
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
Sig="/home/dai/tmp/Assignment#3/Size/Signature/BatchProcessing.m"
Rect="/home/dai/tmp/Assignment#3/Size/Rectangle/BatchProcessing.sh"
Eva="/home/dai/tmp/Assignment#3/Size/Evaluation/BatchProcessing.m"
MatlabExe="/opt/Matlab2013/bin/matlab"
${MatlabExe} -nodesktop -nosplash -r "run ${Sig};quit"
sh ${Rect}
${MatlabExe} -nodesktop -nosplash -r "run ${Eva};quit"
| zhenglab/2015SpringCV | Assignments/Assignment3Solutions/戴嘉伦/Grab/Size/Batch_Size.sh | Shell | gpl-2.0 | 451 |
#!/bin/bash
RRD=/data/mirror/rrd
PNG=/data/mirror/www/size
for file in $RRD/*; do
tree=`basename $file`
tree=${tree/.rrd/}
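  # the CDEF "raw,1048576,*" scales each sample by 2^20; presumably the RRD
  # stores sizes in MiB and the graphs are rendered in bytes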
rrdtool graph $PNG/$tree-week.png --imgformat PNG \
--end now --start end-1w \
"DEF:raw=$file:size:AVERAGE" "CDEF:size=raw,1048576,*" \
"AREA:size#507AAA" -l 0 -M >/dev/null
rrdtool graph $PNG/$tree-month.png --imgformat PNG \
--end now --start end-1m \
"DEF:raw=$file:size:AVERAGE" "CDEF:size=raw,1048576,*" \
"AREA:size#507AAA" -l 0 -M >/dev/null
rrdtool graph $PNG/$tree-year.png --imgformat PNG \
--end now --start end-1y \
"DEF:raw=$file:size:AVERAGE" "CDEF:size=raw,1048576,*" \
"AREA:size#507AAA" -l 0 -M >/dev/null
done
| osuosl/osuosl-mirror-sync | mirror-stats/mirror-size-graph.sh | Shell | gpl-2.0 | 746 |
#! /bin/sh
modprobe ib_core
modprobe ib_uverbs
modprobe ib_addr
modprobe ib_umad
modprobe ib_cm
modprobe ib_mad
# modprobe ib_ipoib
modprobe ib_sa
modprobe iw_cm
modprobe ib_ucm
modprobe rdma_ucm
modprobe rdma_cm
| nminoru/pib | driver/load_ib_modules.sh | Shell | gpl-2.0 | 214 |
#!/bin/sh
if [ $# -gt 0 ]; then
echo $1 > .version
fi
buildid=$(( $1 + 1 ))
zipfile="Chroma.Kernel-r$buildid.zip"
. ./env_setup.sh ${1} || exit 1;
if [ -e .config ]; then
rm .config
fi
cp arch/arm/configs/aosp_defconfig .config >> /dev/null
make aosp_defconfig >> /dev/null
make -j$NUMBEROFCPUS CONFIG_NO_ERROR_ON_MISMATCH=y
cp arch/arm/boot/zImage-dtb ramdisk/
cd ramdisk/
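# Pack the ramdisk directory into a gzipped cpio archive (mkbootfs), then
# mkbootimg stitches it together with the kernel (zImage-dtb) into boot.img.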
./mkbootfs boot.img-ramdisk | gzip > ramdisk.gz
./mkbootimg --kernel zImage-dtb --cmdline 'console=ttyHSL0,115200,n8 androidboot.hardware=hammerhead user_debug=31 msm_watchdog_v2.enable=1' --base 0x00000000 --pagesize 2048 --ramdisk_offset 0x02900000 --tags_offset 0x02700000 --ramdisk ramdisk.gz --output ../boot.img
rm -rf ramdisk.gz
rm -rf zImage
cd ..
if [ -e arch/arm/boot/zImage ]; then
cp boot.img zip/
rm -rf ramdisk/boot.img
cd zip/
rm -f *.zip
zip -r -9 $zipfile *
rm -f /tmp/*.zip
cp *.zip /tmp
cd ..
else
echo "Something goes wrong aborting!"
return
fi
| artcotto/CharizardX_kernel_hammerhead | build.sh | Shell | gpl-2.0 | 965 |
#!/bin/bash
if [ $# -ne 2 ];then
echo "Usage: $0 <src_1> <src_2>"
exit 1
fi
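# Example: ./exchange_file.sh a.txt b.txt   # the two files swap contents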
mv -- "$1" "$1.swap.$$"
mv -- "$2" "$1"
mv -- "$1.swap.$$" "$2"
exit 0
| chapering/DTIStudy | dataproc_scripts/exchange_file.sh | Shell | gpl-2.0 | 117 |