Set up Python 3 on macOS / Ubuntu
macOS Python 3 Installation
- Install Homebrew (brew), then set up Python and the virtualenv tooling with the commands below
# remove any previously brew-installed Pythons, then reinstall cleanly
brew uninstall python
brew uninstall python3
brew install python
brew install python3
sudo easy_install pip
# workaround if an install fails while trying to uninstall the pre-installed six-1.4.1
sudo pip install haxor-news --upgrade --ignore-installed six
pip install virtualenv
pip install virtualenvwrapper
mkdir ~/.virtualenvs
- Open ~/.bashrc and add the following lines
export WORKON_HOME=~/.virtualenvs
source /usr/local/bin/virtualenvwrapper.sh
- Work in a Python 3 environment (a worked example follows this list)
mkvirtualenv --python=/usr/local/bin/python3 envname
pip3 install {package-name}
- Work in a Python 2 environment (system Python)
mkvirtualenv --python=/usr/bin/python envname
pip install -r requirements.txt
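As a quick end-to-end check of the macOS setup, the whole flow looks roughly like this (the env name demo-py3 and the requests package are only placeholders):
mkvirtualenv --python=/usr/local/bin/python3 demo-py3   # create and activate a Python 3 env
python --version                                        # should report the brew-installed Python 3
pip3 install requests                                   # installs into demo-py3 only, not the system Python
pip freeze > requirements.txt                           # snapshot the env's packages
deactivate                                              # leave the env
workon demo-py3                                         # re-activate it later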
Ubuntu Python 3 Installation
# First install pip for Python 2 by downloading get-pip.py from https://bootstrap.pypa.io/get-pip.py
curl https://bootstrap.pypa.io/get-pip.py > get-pip.py
sudo -H python get-pip.py   # run the installer with the system Python 2
sudo apt-get update
sudo apt-get install python3 python3-pip
sudo -H pip3 install virtualenv
pip3 install --upgrade pip
sudo -H pip install virtualenvwrapper
# add the following lines to ~/.bashrc
# note: set this up as a normal user; the virtualenvs should not live under root
export WORKON_HOME=$HOME/.virtualenvs
export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3
source /usr/local/bin/virtualenvwrapper.sh
mkdir ~/.virtualenvs
source ~/.bashrc
mkvirtualenv <env name>
workon <env name>
deactivate
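For example, a project env on Ubuntu might be set up like this (the name crawler-env is only illustrative, and the last step assumes the project ships a requirements.txt):
mkvirtualenv --python=/usr/bin/python3 crawler-env   # create and activate a Python 3 env
python --version                                     # should report the system Python 3
pip install -r requirements.txt                      # install the project's pinned dependencies
deactivate                                           # drop back to the system Python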
Useful commands
workon                           # list all virtualenvs; workon <env name> switches to one
deactivate                       # leave the active virtualenv
rmvirtualenv <env name>          # delete a virtualenv
pip freeze > requirements.txt    # write the active env's packages (with versions) to requirements.txt
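pip freeze and pip install -r pair up to clone an environment; a minimal sketch with hypothetical env names:
workon old-env                    # the env whose packages you want to reproduce
pip freeze > requirements.txt     # record exact package versions
deactivate
mkvirtualenv --python=/usr/bin/python3 new-env
pip install -r requirements.txt   # recreate the same package set in the new env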
Execute the crawler
# alternative: resumable crawl with a persistent job directory (see the example below)
#scrapy crawl daily-article-spider -o output.json -t json -s JOBDIR=job1
scrapy crawl daily-article-spider -o output.json -t json -a pages=5
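The commented-out JOBDIR variant uses Scrapy's pause/resume support: crawl state is persisted under the given directory, and re-running the identical command resumes the crawl. Roughly:
scrapy crawl daily-article-spider -o output.json -t json -s JOBDIR=job1   # start; state is saved under job1/
# stop with a single Ctrl-C and let the spider shut down cleanly
scrapy crawl daily-article-spider -o output.json -t json -s JOBDIR=job1   # same command picks up where it left off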
crontab -e
# every day at 00:01, run start.sh from the project directory
1 0 * * * cd /home/dev/projects && bash start.sh
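Cron silently discards (or mails) job output, so a variant that keeps a log is often handy; the log path below is just an example:
1 0 * * * cd /home/dev/projects && bash start.sh >> /home/dev/projects/cron.log 2>&1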
~/start.sh
#!/usr/bin/env bash
# launch execute.sh in the background, detached from the terminal; all output goes to nohup.out
nohup ./execute.sh &>nohup.out &
~/execute.sh
#!/usr/bin/env bash
# load virtualenvwrapper, activate the crawler's env, and run the spider
source /usr/local/bin/virtualenvwrapper.sh && \
source ~/.bashrc && \
workon coco01-crawler && \
cd /home/dev/projects/8listings-crawlers/coco01/dailyarticles/dailyarticles && \
scrapy crawl daily-article-spider -o output.json -t json -a pages=5
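Before handing the job to cron, the chain can be tested by hand from the directory that holds the two scripts (execute.sh must be executable, since start.sh calls ./execute.sh):
chmod +x execute.sh   # one-time: start.sh invokes ./execute.sh directly
bash start.sh         # launches execute.sh in the background via nohup
tail -f nohup.out     # follow the crawler output captured by nohup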