refactor(oi): improve data extraction and consolidate documentation
- Fix MQL5 API usage in EA to use correct CopyRates and POSITION_TYPE enums
- Refactor scraper data extraction to use drop_duplicates for unique strikes
- Consolidate Windows setup guide into main README
- Add virtual environment batch files for easier setup and execution
- Simplify run_scraper.bat to focus on core execution
- Normalize lot calculation to use SymbolInfo.LotsStep()
@@ -358,64 +358,66 @@ void OnNewM1Bar(datetime newBarTime) {
 }
 
 void CalculateVolumeEMAFromHistory() {
-   double volumeArray[];
-   ArraySetAsSeries(volumeArray, true);
-   CopyVolume(_Symbol, PERIOD_M1, 1, InpVolumeEmaPeriod + 1, volumeArray);
+   MqlRates rates[];
+   ArraySetAsSeries(rates, true);
+   int copied = CopyRates(_Symbol, PERIOD_M1, 1, InpVolumeEmaPeriod + 1, rates);
+   if(copied < InpVolumeEmaPeriod + 1) return;
    double emaAlpha = 2.0 / (InpVolumeEmaPeriod + 1);
    double sum = 0;
    for(int i = 0; i < InpVolumeEmaPeriod; i++) {
-      sum += volumeArray[i];
+      sum += (double)rates[i].tick_volume;
    }
    double avgVolume = sum / InpVolumeEmaPeriod;
    if(VolumeEmaValue == 0) {
       VolumeEmaValue = avgVolume;
    } else {
       VolumeEmaValue = emaAlpha * avgVolume + (1 - emaAlpha) * VolumeEmaValue;
    }
 }
 
 void DetectAbsorptionFromHistory() {
-   double volumeArray[];
-   ArraySetAsSeries(volumeArray, true);
-   CopyVolume(_Symbol, PERIOD_M1, 1, InpAbsorptionBars + 1, volumeArray);
+   MqlRates rates[];
+   ArraySetAsSeries(rates, true);
+   int copied = CopyRates(_Symbol, PERIOD_M1, 1, InpAbsorptionBars + 1, rates);
+   if(copied < InpAbsorptionBars + 1) return;
    double avgVolume = 0;
    for(int i = 0; i < InpAbsorptionBars; i++) {
-      avgVolume += volumeArray[i];
+      avgVolume += (double)rates[i].tick_volume;
    }
    avgVolume /= InpAbsorptionBars;
 
    double volumeThreshold = avgVolume * InpMinVolumeMultiplier;
    int sellAbsorptionCount = 0;
    int buyAbsorptionCount = 0;
 
    for(int i = 1; i <= InpAbsorptionBars; i++) {
-      double high = iHigh(_Symbol, PERIOD_M1, i);
-      double low = iLow(_Symbol, PERIOD_M1, i);
-      double close = iClose(_Symbol, PERIOD_M1, i);
-      double open = iOpen(_Symbol, PERIOD_M1, i);
-      int barVolume = (int)iVolume(_Symbol, PERIOD_M1, i);
+      double high = rates[i].high;
+      double low = rates[i].low;
+      double close = rates[i].close;
+      double open = rates[i].open;
+      int barVolume = (int)rates[i].tick_volume;
 
       double barRange = high - low;
       double barDrift = close - open;
 
       bool highVolume = barVolume > volumeThreshold;
       bool lowDrift = barRange < InpMaxPriceDriftPoints * _Point;
 
       if(highVolume && lowDrift && barDrift < 0) {
          sellAbsorptionCount++;
       }
       if(highVolume && lowDrift && barDrift > 0) {
          buyAbsorptionCount++;
       }
    }
 
-   int requiredBars = MathCeil(InpAbsorptionBars / 2.0);
+   int requiredBars = (int)MathCeil(InpAbsorptionBars / 2.0);
 
    if(sellAbsorptionCount >= requiredBars && IsPriceNearPutStrike()) {
       CurrentAbsorptionState = ABSORPTION_SELL;
@@ -679,8 +681,8 @@ bool IsAtSupport() {
 }
 
 void ExecuteBuyTrade() {
-   double lotSize = CalculateLotSize(ORDER_TYPE_BUY);
+   double lotSize = CalculateLotSize(POSITION_TYPE_BUY);
    if(lotSize <= 0) return;
 
    double sl = 0, tp = 0;
    double nearestPutStrike = GetNearestPutStrike();
@@ -715,8 +717,8 @@ void ExecuteBuyTrade() {
 }
 
 void ExecuteSellTrade() {
-   double lotSize = CalculateLotSize(ORDER_TYPE_SELL);
+   double lotSize = CalculateLotSize(POSITION_TYPE_SELL);
    if(lotSize <= 0) return;
 
    double sl = 0, tp = 0;
    double nearestCallStrike = GetNearestCallStrike();
@@ -820,8 +822,8 @@ double CalculateLotSize(ENUM_POSITION_TYPE tradeType) {
 }
 
 double NormalizeLot(double lot) {
-   double step = SymbolInfo.LotStep();
+   double step = SymbolInfo.LotsStep();
    return MathFloor(lot / step) * step;
 }
 
 void ManagePositions() {
@@ -1037,8 +1039,11 @@ void CreateDashboard() {
 }
 
 void UpdateDashboard() {
+   MqlRates rates[];
+   int copied = CopyRates(_Symbol, PERIOD_M1, 0, 1, rates);
+
    UpdateLabel("DB_Symbol", 20, 45, "Symbol: " + _Symbol, clrWhite, 8);
    UpdateLabel("DB_Price", 20, 65, "Price: " + DoubleToString(SpotPrice, 2), clrCyan, 8);
 
    string deltaText = DoubleToString(OrderFlowDeltaPercent, 1) + "%";
    color deltaColor = OrderFlowDeltaPercent > 0 ? clrLime : (OrderFlowDeltaPercent < 0 ? clrRed : clrGray);
@@ -1063,9 +1068,9 @@ void UpdateDashboard() {
 
    UpdateLabel("DB_Absorption", 20, 105, "Absorption: " + absorptionText, absorptionColor, 8);
 
-   int currentVol = CurrentBarVolume > 0 ? CurrentBarVolume : (int)Volume(0);
+   int currentVol = CurrentBarVolume > 0 ? CurrentBarVolume : (copied > 0 ? (int)rates[0].tick_volume : 0);
    UpdateLabel("DB_Volume", 20, 125, "Volume: " + IntegerToString(currentVol) +
                " (Avg: " + IntegerToString((int)VolumeEmaValue) + ")", clrWhite, 8);
 
    string driftText = DoubleToString(PriceDrift / _Point, 1) + " pts";
    color driftColor = PriceDrift < InpMaxPriceDriftPoints * _Point ? clrLime : clrOrange;
@@ -1,179 +1,268 @@
 # CME OI Scraper
 
-Python scraper to pull Open Interest data from CME Group QuikStrike and current gold price from investing.com.
+Python scraper that extracts Open Interest data from CME Group QuikStrike and current gold price from investing.com.
 
 ## What It Extracts
 
 1. **OI Levels (from CME QuikStrike):**
-   - Top 3 CALL strikes by OI volume
-   - Top 3 PUT strikes by OI volume
+   - Top 3 CALL strikes by OI volume (unique strikes)
+   - Top 3 PUT strikes by OI volume (unique strikes)
 
 2. **Gold Price (from investing.com):**
-   - Current gold futures price (e.g., 4345.50)
+   - Current gold futures price (e.g., 4476.50)
 
 ## Prerequisites
 
-- Python 3.9+
-- CME Group QuikStrike account with login credentials
+- Python 3.9 or higher
+- CME Group QuikStrike account (free registration at https://www.cmegroup.com)
+- Windows 10/11 (for batch files) or Linux/macOS
 
-## Installation
-
-1. Copy environment variables:
-```bash
-cp .env.example .env
-```
-
-2. Edit `.env` and add your CME credentials:
-```bash
-CME_USERNAME=your_username
-CME_PASSWORD=your_password
-```
-
-3. Install dependencies:
-```bash
-pip install -r requirements.txt
-playwright install chromium
-```
-
-## Usage
-
-### Basic Scraping
-
-```bash
-python main.py
-```
-
-This will:
-- Login to CME QuikStrike
-- Navigate to OI Heatmap
-- Extract top 3 CALL and PUT strikes by OI volume
-- Scrape current gold price from investing.com
-- Export to `oi_data.csv`
-
-### Session Persistence
-
-The scraper automatically saves your login session to `cookies.json`. This means:
-
-- **First run**: Logs in with your credentials, saves cookies
-- **Subsequent runs**: Uses saved cookies if session is still valid
-- **Session expired**: Automatically logs in again and saves new cookies
-
-Benefits for scheduled runs:
-- Faster execution (skips login when session is valid)
-- Reduces login attempts to CME servers
-- CME sessions typically last several days/weeks
-
-To force a fresh login, delete `cookies.json`:
-```bash
-rm cookies.json
-```
-
-### Output Format
-
-The CSV output is compatible with the EA's `LoadOIFromCSV()` and `LoadFuturePriceFromCSV()` functions:
-
-```csv
-Type,Strike,OI
-CALL,4345,155398
-CALL,4350,229137
-CALL,4360,90649
-PUT,4300,227936
-PUT,4290,270135
-PUT,4280,65839
-
-[Price]
-FuturePrice,4345.50
-```
-
-**Note:** The `[Price]` section contains the current gold futures price scraped from investing.com. The EA reads this value for Delta calculation.
+## Quick Start
+
+### Windows
+
+1. **Run one-time setup:**
+   ```cmd
+   cd C:\Path\To\oi_scraper
+   setup_env.bat
+   ```
+
+2. **Run the scraper:**
+   ```cmd
+   run_with_venv.bat
+   ```
+
+### Linux/macOS
+
+1. **Setup:**
+   ```bash
+   cd /path/to/oi_scraper
+   python3 -m venv venv
+   source venv/bin/activate
+   pip install -r requirements.txt
+   playwright install chromium
+   ```
+
+2. **Run:**
+   ```bash
+   source venv/bin/activate
+   python main.py
+   ```
 
 ## Configuration
 
-Edit `.env` to customize:
-
-- `PRODUCT_URL` - QuikStrike product page URL (requires login)
-- `CME_LOGIN_URL` - CME login page URL (default: SSO URL)
-- `TOP_N_STRIKES` - Number of top strikes to export (default: 3)
-- `HEADLESS` - Run browser in headless mode (default: false for debugging)
-- `CSV_OUTPUT_PATH` - Output CSV file path
-- `TIMEOUT_SECONDS` - Page load timeout
-
-### Available Products
-
-**Gold (XAUUSD/COMEX Gold - OG|GC):**
-```
-PRODUCT_URL=https://cmegroup.quikstrike.net/User/QuikStrikeView.aspx?pid=40&viewitemid=IntegratedOpenInterestTool
-```
-
-**Silver:**
-```
-PRODUCT_URL=https://cmegroup.quikstrike.net/User/QuikStrikeView.aspx?pid=41&viewitemid=IntegratedOpenInterestTool
-```
-
-**SOFR (3M SOFR):**
-```
-PRODUCT_URL=https://cmegroup.quikstrike.net/User/QuikStrikeView.aspx?pid=476&viewitemid=IntegratedOpenInterestTool
-```
-
-**Note:** You must be logged in to access QuikStrike data. The scraper will automatically login using credentials from `.env`.
+### Edit `.env` File
+
+Copy and edit the environment file:
+
+```cmd
+copy .env.example .env
+notepad .env
+```
+
+Required settings:
+```env
+CME_USERNAME=your_cme_username
+CME_PASSWORD=your_cme_password
+```
+
+Optional settings:
+```env
+# Number of top strikes to export (default: 3)
+TOP_N_STRIKES=3
+
+# Run browser without window (default: false)
+HEADLESS=false
+
+# Page timeout in seconds (default: 30)
+TIMEOUT_SECONDS=30
+
+# Output CSV path
+CSV_OUTPUT_PATH=./oi_data.csv
+
+# Logging level: DEBUG, INFO, WARNING, ERROR
+LOG_LEVEL=INFO
+```
+
+## Output Format
+
+The scraper exports to `oi_data.csv`:
+
+```csv
+Type,Strike,OI
+CALL,4375.0,147
+CALL,4450.0,173
+CALL,4500.0,176
+PUT,4435.0,49
+PUT,4400.0,102
+PUT,4515.0,150
+
+[Price]
+FuturePrice,4467.8
+```
+
+The `[Price]` section contains the current gold futures price scraped from investing.com.
+
+## Session Persistence
+
+The scraper saves login sessions to `cookies.json`:
+
+- **First run:** Logs in with credentials, saves cookies
+- **Subsequent runs:** Uses saved cookies if session is valid
+- **Session expired:** Automatically re-logs in and saves new cookies
+
+This makes scheduled runs faster and reduces login attempts to CME servers.
+
+To force a fresh login:
+```cmd
+del cookies.json
+```
 
 ## Integration with EA
 
-The EA reads OI data from CSV when `InpOISource = OI_SOURCE_CSV_FILE`.
+The EA reads OI data from CSV when configured:
+
+```mql5
+input ENUM_OI_SOURCE InpOISource = OI_SOURCE_CSV_FILE;
+```
 
-Place the generated `oi_data.csv` in MetaTrader's `MQL5/Files` directory.
+Copy `oi_data.csv` to your MT5 `MQL5/Files` directory:
+
+```
+C:\Users\YourUsername\AppData\Roaming\MetaQuotes\Terminal\Common\MQL5\Files\oi_data.csv
+```
 
-## Scheduling
+## Automatic Daily Scheduling
 
-Use cron or Windows Task Scheduler to run periodically:
+### Windows Task Scheduler
+
+1. **Create scheduled task:**
+   - Open Task Scheduler (`taskschd.msc`)
+   - Click "Create Task"
+
+2. **Configure General tab:**
+   - Name: `CME OI Scraper - Daily`
+   - ✅ Run whether user is logged on or not
+   - ✅ Run with highest privileges
+
+3. **Configure Triggers tab:**
+   - New → On a schedule → Daily
+   - Start time: 9:00 AM (or your preferred time)
+   - ✅ Enabled
+
+4. **Configure Actions tab:**
+   - Action: Start a program
+   - Program/script:
+     ```
+     C:\Path\To\oi_scraper\run_scheduled.bat
+     ```
+   - Start in:
+     ```
+     C:\Path\To\oi_scraper
+     ```
+
+5. **Click OK to save**
+
+### Linux/macOS (cron)
 
 ```bash
-# Run every hour
-0 * * * * cd /path/to/oi_scraper && python main.py
+# Edit crontab
+crontab -e
+
+# Add line to run every day at 9 AM
+0 9 * * * cd /path/to/oi_scraper && /path/to/venv/bin/python main.py
 ```
 
+## Batch Files Reference
+
+| File | Purpose |
+|------|---------|
+| `setup_env.bat` | One-time setup (creates virtual environment) |
+| `run_with_venv.bat` | Manual run with visible window |
+| `run_scheduled.bat` | For Task Scheduler (no window, no pause) |
+
 ## Troubleshooting
 
-**Login fails:**
+### Module Not Found Errors
+
+**Error:** `ModuleNotFoundError: No module named 'playwright'`
+
+**Solution:**
+```cmd
+run_with_venv.bat
+```
+
+The virtual environment ensures all dependencies are isolated.
+
+### Login Fails
+
 - Verify credentials in `.env`
-- Check if CME requires 2FA
-- Set `HEADLESS=false` to see what's happening
-- Check screenshots: `login_failed.png`, `login_error.png`, `login_success.png`
+- Check if CME requires 2FA (manual intervention needed)
+- Set `HEADLESS=false` to see browser activity
+- Check screenshots: `login_failed.png`, `login_error.png`
 
-**No data extracted:**
-- Check if table structure changed
-- Increase `TIMEOUT_SECONDS`
-- Check logs for detailed errors
-- Screenshot saved as `login_debug.png` or `login_failed.png`
+### No Data Extracted
 
-**Login page selectors changed:**
-- If the scraper can't find username/password inputs, CME may have updated their login page
-- Update the selectors in `login_to_cme()` function in `main.py`:
-```python
-# Example: update to match current CME login form
-page.fill('input[id="username"]', CME_USERNAME)
-page.fill('input[id="password"]', CME_PASSWORD)
-page.click('button[type="submit"]')
-```
+- Check if CME table structure changed
+- Increase `TIMEOUT_SECONDS=60` in `.env`
+- Check logs for errors
+- Screenshot saved as `login_debug.png`
 
-**Browser issues:**
-- Install Chromium dependencies: `playwright install chromium`
-- Try different browser: Change `p.chromium.launch()` to `p.firefox.launch()`
+### Browser Issues
+
+```cmd
+# Reinstall Chromium
+python -m playwright install chromium
+```
+
+### Session Expires Frequently
+
+Delete cookies to force fresh login:
+```cmd
+del cookies.json
+```
+
+### Check Python Path Issues (Windows)
+
+```cmd
+# Check which Python is being used
+where python
+
+# Use Python launcher
+py -3 main.py
+
+# Or use the virtual environment
+run_with_venv.bat
+```
+
+## Finding Product IDs
+
+To scrape other instruments (Silver, Crude Oil, etc.):
+
+1. Visit CME QuikStrike OI Heatmap
+2. Login to your CME account
+3. Select a product from the dropdown
+4. The URL updates with the `pid` parameter
+5. Note: This scraper is configured for Gold by default
 
 ## Notes
 
-- The scraper targets the OI Heatmap table structure
-- Only exports top N strikes by OI volume
-- Login session is not persisted (login each run)
-- Cookies could be saved for faster subsequent runs
+- Targets the OI Heatmap table structure
+- Exports top N unique strikes by OI volume
+- Uses session cookies for faster subsequent runs
+- CME sessions typically last several days to weeks
+- Virtual environment recommended to avoid Python path conflicts
 
-### Finding Product IDs
-
-To find product IDs for other instruments:
-1. Visit https://www.cmegroup.com/tools-information/quikstrike/open-interest-heatmap.html
-2. Login to your CME account
-3. Select a product from the "Products" menu
-4. The URL will update with the `pid` parameter
-5. Copy that URL to your `.env` file
-
-Example: `https://www.cmegroup.com/tools-information/quikstrike/open-interest-heatmap.html?pid=40` (Gold)
+## Files
+
+```
+oi_scraper/
+├── main.py              # Main scraper script
+├── requirements.txt     # Python dependencies
+├── .env.example         # Environment template
+├── .env                 # Your credentials (create from example)
+├── setup_env.bat        # Windows: Create virtual environment
+├── run_with_venv.bat    # Windows: Manual run
+├── run_scheduled.bat    # Windows: Task Scheduler run
+├── oi_data.csv          # Output file (generated)
+├── cookies.json         # Session cookies (generated)
+└── scraper.log          # Log file (generated)
+```
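The Session Persistence section of the consolidated README describes the cookie flow without showing code. A minimal, hedged sketch of how such save/load logic can be written with Playwright's sync API — the file name and function names here are illustrative assumptions, not necessarily the identifiers used in `main.py`:

```python
# Hedged sketch of cookie persistence with Playwright's sync API.
# COOKIES_FILE and the helper names are illustrative, not taken from main.py.
import json
from pathlib import Path
from playwright.sync_api import sync_playwright

COOKIES_FILE = Path("cookies.json")

def save_cookies(context):
    """Persist the browser context's cookies so the next run can skip login."""
    COOKIES_FILE.write_text(json.dumps(context.cookies()))

def load_cookies(context):
    """Restore cookies from a previous session; return True if any were loaded."""
    if not COOKIES_FILE.exists():
        return False
    context.add_cookies(json.loads(COOKIES_FILE.read_text()))
    return True

with sync_playwright() as p:
    browser = p.chromium.launch(headless=False)
    context = browser.new_context()
    had_session = load_cookies(context)
    page = context.new_page()
    # ... log in only if had_session is False or the saved session proves invalid ...
    save_cookies(context)
    browser.close()
```

Deleting `cookies.json`, as the README suggests, simply forces the `load_cookies` branch to fail and triggers a fresh login on the next run.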
@@ -1,658 +0,0 @@
# CME OI Scraper - Windows Setup Guide

Complete guide for setting up and running the CME OI scraper on Windows with automatic daily updates.

## Table of Contents

- [Prerequisites](#prerequisites)
- [Installation](#installation)
- [Configuration](#configuration)
- [Manual Testing](#manual-testing)
- [Automatic Daily Updates](#automatic-daily-updates)
- [MetaTrader 5 Integration](#metatrader-5-integration)
- [Troubleshooting](#troubleshooting)

---

## Prerequisites

### Required Software

1. **Python 3.9 or higher**
   - Download: https://www.python.org/downloads/
   - During installation: ✅ Check "Add Python to PATH"

2. **CME Group QuikStrike Account**
   - Free account required: https://www.cmegroup.com/
   - Register for QuikStrike access
   - Save your username and password

3. **MetaTrader 5** (for EA integration)
   - Download: https://www.metatrader5.com/
   - Install on your Windows machine

### Verify Python Installation

```cmd
python --version
```

Expected output: `Python 3.9.x` or higher

If not found, install Python or use `py` or `python3` commands.

---

## Installation

### Step 1: Navigate to Scraper Directory

Open Command Prompt (cmd) and navigate:

```cmd
cd C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper
```

Replace `YourUsername` with your actual Windows username.

### Step 2: Create Environment File

```cmd
copy .env.example .env
```

### Step 3: Edit .env File

Open `.env` with Notepad:

```cmd
notepad .env
```

Update with your credentials:

```env
# CME Group QuikStrike Login Credentials
CME_USERNAME=your_actual_username_here
CME_PASSWORD=your_actual_password_here

# Product Configuration (Gold)
PRODUCT_URL=https://cmegroup.quikstrike.net/User/QuikStrikeView.aspx?pid=40&viewitemid=IntegratedOpenInterestTool

# Output Settings
CSV_OUTPUT_PATH=./oi_data.csv
TOP_N_STRIKES=3

# Scraping Settings
HEADLESS=false
TIMEOUT_SECONDS=30
RETRY_ATTEMPTS=3

# Logging
LOG_LEVEL=INFO
```

**Save and close** (Ctrl+S, then Alt+F4).

### Step 4: Install Python Dependencies

```cmd
pip install -r requirements.txt
```

Expected output: Successfully installed playwright, python-dotenv, pandas

### Step 5: Install Playwright Browser

```cmd
playwright install chromium
```

Expected output: Downloading Chromium... [progress bar]

---

## Configuration

### Available Products

**Gold (XAUUSD/COMEX Gold - OG|GC):**
```env
PRODUCT_URL=https://cmegroup.quikstrike.net/User/QuikStrikeView.aspx?pid=40&viewitemid=IntegratedOpenInterestTool
```

**Silver:**
```env
PRODUCT_URL=https://cmegroup.quikstrike.net/User/QuikStrikeView.aspx?pid=41&viewitemid=IntegratedOpenInterestTool
```

**SOFR (3M SOFR):**
```env
PRODUCT_URL=https://cmegroup.quikstrike.net/User/QuikStrikeView.aspx?pid=476&viewitemid=IntegratedOpenInterestTool
```

### Configuration Options

| Setting | Description | Default |
|----------|-------------|---------|
| `TOP_N_STRIKES` | Number of top strikes to export | 3 |
| `HEADLESS` | Run browser without window (true/false) | false |
| `TIMEOUT_SECONDS` | Page load timeout in seconds | 30 |
| `CSV_OUTPUT_PATH` | Output CSV file path | ./oi_data.csv |
| `LOG_LEVEL` | DEBUG, INFO, WARNING, ERROR | INFO |

---

## Manual Testing

### Run Scraper Manually

```cmd
python main.py
```

Expected output:
```
INFO:__main__:Cookies loaded from file
INFO:__main__:Using existing session (cookies)
INFO:__main__:Navigating to OI Heatmap: https://...
INFO:__main__:Extracting OI data from Gold matrix table...
INFO:__main__:Extracted 6 OI levels
INFO:__main__:Exported OI data to ./oi_data.csv
```

### Check Output

**1. Verify CSV created:**

```cmd
dir oi_data.csv
```

**2. View CSV content:**

```cmd
notepad oi_data.csv
```

Expected format:
```csv
Type,Strike,OI
CALL,4400,6193
CALL,4300,3826
CALL,4350,1983
PUT,4400,5559
PUT,4300,2988
PUT,4350,1214
```

### Check Logs

```cmd
type scraper.log
```

Or view in Notepad:

```cmd
notepad scraper.log
```

---

## Automatic Daily Updates

### Option 1: Windows Task Scheduler (Recommended)

#### Step 1: Create Batch File Wrapper

Create `run_scraper.bat` in the oi_scraper directory:

```cmd
@echo off
cd /d C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper

echo Starting CME OI Scraper at %date% %time% >> scraper.log
echo ---------------------------------------- >> scraper.log

python main.py >> scraper.log 2>&1

if %ERRORLEVEL% EQU 0 (
    echo %date% %time%: Scraper completed successfully >> scraper.log
) else (
    echo %date% %time%: Scraper failed with error %ERRORLEVEL% >> scraper.log
)

echo ---------------------------------------- >> scraper.log
```

Replace `YourUsername` with your actual username.

#### Step 2: Open Task Scheduler

Press `Win + R`, type `taskschd.msc`, press Enter

Or: Start → Windows Administrative Tools → Task Scheduler

#### Step 3: Create Task

1. Click **"Create Basic Task"** on right sidebar
2. **Name:** `CME OI Scraper - Daily`
3. **Description:** `Update OI data from CME QuikStrike every day at 9 AM`
4. Click **Next**

#### Step 4: Set Trigger

1. **Trigger:** Select "Daily"
2. **Start date:** Today's date
3. **Start time:** 9:00:00 AM (or your preferred time)
4. Click **Next**

#### Step 5: Set Action

1. **Action:** Select "Start a program"
2. **Program/script:**
   ```
   C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper\run_scraper.bat
   ```
3. **Start in (optional):**
   ```
   C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper
   ```
4. Click **Next**

#### Step 6: Finish

1. Review settings
2. Check "Open the Properties dialog for this task when I click Finish"
3. Click **Finish**

#### Step 7: Configure Advanced Settings (Optional)

In the Properties dialog:

- **General tab:**
  - ✅ Run whether user is logged on or not
  - ✅ Do not store password (if using Windows authentication)
  - ✅ Run with highest privileges

- **Conditions tab:**
  - ✅ Start the task only if the computer is on AC power
  - ✅ Stop if the computer switches to battery power
  - ✅ Wake the computer to run this task

- **Settings tab:**
  - ✅ Allow task to be run on demand
  - ❌ Stop the task if it runs longer than: 30 minutes
  - ✅ If the task fails, restart every: 5 minutes (up to 3 times)

Click **OK** to save settings.

#### Step 8: Test Task

1. In Task Scheduler, find "CME OI Scraper - Daily"
2. Right-click → **Run**
3. Check `scraper.log` after a minute:
   ```cmd
   type scraper.log
   ```

---

### Option 2: PowerShell Script (Advanced)

#### Step 1: Create PowerShell Script

Save as `run_scraper.ps1`:

```powershell
# Script configuration
$scriptPath = "C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper"
$logFile = "$scriptPath\scraper.log"
$timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"

# Navigate to script directory
cd $scriptPath

try {
    # Run Python scraper
    Write-Output "$timestamp: Starting CME OI Scraper" | Add-Content $logFile
    & python main.py *>> $logFile 2>&1

    # Check if CSV was created
    if (Test-Path "oi_data.csv") {
        $fileInfo = Get-Item "oi_data.csv"
        Write-Output "$timestamp: Scraper completed successfully (CSV updated: $($fileInfo.LastWriteTime))" | Add-Content $logFile
    } else {
        Write-Output "$timestamp: WARNING - CSV file not created" | Add-Content $logFile
    }
} catch {
    $errorMsg = $_.Exception.Message
    Write-Output "$timestamp: ERROR - $errorMsg" | Add-Content $logFile
    exit 1
}
```

#### Step 2: Update Task Scheduler to Use PowerShell

Same steps as Option 1, but:

- **Program/script:** `powershell.exe`
- **Add arguments:**
  ```
  -ExecutionPolicy Bypass -File "C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper\run_scraper.ps1"
  ```

---

## MetaTrader 5 Integration

### Find MT5 Files Directory

MT5 data directory location:

```
C:\Users\YourUsername\AppData\Roaming\MetaQuotes\Terminal\[Terminal_ID]\MQL5\Files\
```

**To find your Terminal_ID:**

1. Open MT5
2. Click **File** → **Open Data Folder**
3. Navigate to `Terminal\[Your_Terminal_ID]\MQL5\Files\`

### Update Batch File to Copy to MT5

Edit `run_scraper.bat`:

```cmd
@echo off
cd /d C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper

echo Starting CME OI Scraper at %date% %time% >> scraper.log
echo ---------------------------------------- >> scraper.log

python main.py >> scraper.log 2>&1

if %ERRORLEVEL% EQU 0 (
    if exist oi_data.csv (
        echo Copying OI data to MT5... >> scraper.log
        copy oi_data.csv "C:\Users\YourUsername\AppData\Roaming\MetaQuotes\Terminal\[Your_Terminal_ID]\MQL5\Files\oi_data.csv"

        if %ERRORLEVEL% EQU 0 (
            echo %date% %time%: Scraper completed - OI data copied to MT5 >> scraper.log
        ) else (
            echo %date% %time%: ERROR - Failed to copy to MT5 >> scraper.log
        )
    ) else (
        echo %date% %time%: ERROR - oi_data.csv not found >> scraper.log
    )
) else (
    echo %date% %time%: ERROR - Scraper failed with error %ERRORLEVEL% >> scraper.log
)

echo ---------------------------------------- >> scraper.log
```

Replace `[Your_Terminal_ID]` with your actual MT5 terminal ID.

### Update EA Configuration

In your EA (`OI_MeanReversion_Pro_XAUUSD_A.mq5`), set:

```mql5
input ENUM_OI_SOURCE InpOISource = OI_SOURCE_CSV_FILE; // Load from CSV file
```

The EA will automatically read `oi_data.csv` from its Files directory.

---

## Troubleshooting

### Python Not Found

**Error:** `'python' is not recognized as an internal or external command`

**Solutions:**

1. Use full path to Python:
   ```cmd
   C:\Users\YourUsername\AppData\Local\Programs\Python\Python312\python.exe main.py
   ```
2. Use `py` launcher:
   ```cmd
   py main.py
   ```
3. Reinstall Python with "Add to PATH" option

### Module Import Errors

**Error:** `ModuleNotFoundError: No module named 'playwright'`

**Solution:**
```cmd
pip install -r requirements.txt
```

### Login Fails

**Error:** `Login failed - still on login page`

**Solutions:**

1. Check credentials in `.env` file:
   ```cmd
   notepad .env
   ```
2. Check login screenshots:
   - `login_failed.png` - Shows login page
   - `login_error.png` - Shows error during login
   - `login_success.png` - Confirms successful login
3. Manually test login at: https://www.cmegroup.com/
4. Check if 2FA is required (CME may require additional authentication)

### No Data Extracted

**Warning:** `No CALL OI data extracted` or `No PUT OI data extracted`

**Solutions:**

1. Check if you're logged in:
   - Delete `cookies.json` to force fresh login
   - Run scraper manually with `HEADLESS=false` in `.env`
2. Check if page structure changed:
   - View screenshots to see actual page content
   - Check if Gold product URL is correct
3. Increase timeout:
   ```env
   TIMEOUT_SECONDS=60
   ```

### Task Not Running

**Issue:** Task Scheduler doesn't execute the task

**Solutions:**

1. Check task history:
   - Task Scheduler → Right-click task → Properties → History tab
   - Look for errors in the log
2. Test manually:
   - Right-click task → Run
   - Check `scraper.log` for output
3. Check account permissions:
   - Ensure task is set to run with your Windows account
   - Check "Run whether user is logged on or not"
4. Check Windows Event Viewer:
   - Event Viewer → Windows Logs → Application
   - Look for Task Scheduler errors

### Session Expiration

**Issue:** Session expires after some time

**Solution:**
The scraper will automatically re-login when cookies expire. No manual action needed.

To force fresh login:
```cmd
del cookies.json
```

### Check Logs

**View recent logs:**

```cmd
type scraper.log | more
```

**View last 20 lines:**

```cmd
powershell "Get-Content scraper.log -Tail 20"
```

**Search for errors:**

```cmd
findstr /C:"ERROR" scraper.log
```

### Verify CSV Output

**Check if CSV is valid:**

```cmd
python -c "import pandas as pd; print(pd.read_csv('oi_data.csv'))"
```

**Check file size:**

```cmd
dir oi_data.csv
```

---

## Advanced Options

### Run Multiple Times Per Day

**Edit Task Scheduler Trigger:**

1. Open task properties → Triggers tab
2. Edit existing trigger → Click "New" to add additional
3. Set different times:
   - 9:00 AM
   - 12:00 PM
   - 3:00 PM
   - 6:00 PM

### Run on Market Days Only

**Create separate batch file:**

```cmd
@echo off
cd /d C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper

REM Check if today is weekday (1=Monday, 5=Friday)
for /f "skip=1 tokens=*" %%a in ('wmic path win32_localtime get dayofweek /value') do set DAY=%%a

if %DAY% LSS 1 goto END
if %DAY% GTR 5 goto END

REM Run scraper
python main.py >> scraper.log 2>&1

:END
```

### Email Notifications

**Use PowerShell to send email on completion:**

```powershell
# Add to run_scraper.ps1 at the end
$smtpServer = "smtp.gmail.com"
$smtpPort = 587
$smtpUser = "your_email@gmail.com"
$smtpPass = "your_password"
$from = "CME OI Scraper <your_email@gmail.com>"
$to = "your_email@gmail.com"
$subject = "CME OI Scraper - %date%"

if ($errorOccurred) {
    $body = "CME OI Scraper failed. Check logs for details."
} else {
    $body = "CME OI Scraper completed successfully.`n`nUpdated files:`n- oi_data.csv"
}

$message = New-Object System.Net.Mail.MailMessage $from, $to
$message.Subject = $subject
$message.Body = $body

$smtp = New-Object System.Net.Mail.SmtpClient $smtpServer, $smtpPort
$smtp.EnableSsl = $true
$smtp.Credentials = New-Object System.Net.NetworkCredential $smtpUser, $smtpPass

$smtp.Send($message)
```

---

## Summary

**Quick Start Checklist:**

- [ ] Python 3.9+ installed
- [ ] CME QuikStrike account created
- [ ] `.env` file configured with credentials
- [ ] Dependencies installed (`pip install -r requirements.txt`)
- [ ] Playwright browser installed (`playwright install chromium`)
- [ ] Manual test successful (`python main.py`)
- [ ] `oi_data.csv` created and valid
- [ ] Task Scheduler task created
- [ ] Task tested manually
- [ ] CSV copied to MT5 Files directory
- [ ] EA configured to use CSV file

**Daily Workflow:**

1. Task Scheduler runs at 9:00 AM
2. Batch file executes Python scraper
3. Scraper logs in with saved cookies (or fresh login)
4. OI data extracted and saved to `oi_data.csv`
5. CSV copied to MT5 Files directory
6. EA reads updated OI data
7. EA uses new OI levels for trading

---

## Support

For issues or questions:

1. Check `scraper.log` for detailed error messages
2. Review screenshots (login_failed.png, login_error.png)
3. Verify `.env` configuration
4. Test manually without Task Scheduler
5. Check Windows Event Viewer for system errors

---

**Last Updated:** January 4, 2026
**Version:** 1.0
**Platform:** Windows 10/11
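Both the consolidated README and the removed Windows guide above document the same `oi_data.csv` contract: `Type,Strike,OI` rows followed by a `[Price]` footer. As a hedged illustration (this is not code taken from `main.py` or the EA), a minimal Python reader for that layout:

```python
# Hedged sketch: read oi_data.csv, separating the Type/Strike/OI rows
# from the [Price] footer. The file layout follows the Output Format
# sections documented above; the parsing code itself is illustrative.
import csv

def read_oi_csv(path="oi_data.csv"):
    strikes, future_price = [], None
    with open(path, newline="") as f:
        in_price_section = False
        for row in csv.reader(f):
            if not row:
                continue                      # skip the blank separator line
            if row[0] == "[Price]":
                in_price_section = True
                continue
            if in_price_section:
                if row[0] == "FuturePrice":
                    future_price = float(row[1])
            elif row[0] in ("CALL", "PUT"):
                strikes.append({"type": row[0],
                                "strike": float(row[1]),
                                "oi": int(row[2])})
    return strikes, future_price

levels, price = read_oi_csv()
print(f"{len(levels)} OI levels, future price {price}")
```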
@@ -1,3 +1,11 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+CME OI Scraper - Extracts Open Interest data from CME QuikStrike and gold price from investing.com
+Usage: python main.py
+Requires: pip install -r requirements.txt
+"""
+
 import os
 import logging
 import json
@@ -212,19 +220,24 @@ def extract_oi_data(page):
 
     if call_levels:
         call_df = pd.DataFrame(call_levels)
-        call_df = call_df.groupby("Strike", as_index=False).agg({"OI": "max"})
-        call_df = call_df.nlargest(TOP_N_STRIKES, "OI")
+        call_df = call_df.drop_duplicates(subset="Strike", keep="first")
+        call_df = call_df.sort_values("OI")
+        call_df = call_df.tail(TOP_N_STRIKES)
+        call_df["Type"] = "CALL"
     else:
         call_df = pd.DataFrame()
 
     if put_levels:
         put_df = pd.DataFrame(put_levels)
-        put_df = put_df.groupby("Strike", as_index=False).agg({"OI": "max"})
-        put_df = put_df.nlargest(TOP_N_STRIKES, "OI")
+        put_df = put_df.drop_duplicates(subset="Strike", keep="first")
+        put_df = put_df.sort_values("OI")
+        put_df = put_df.tail(TOP_N_STRIKES)
+        put_df["Type"] = "PUT"
     else:
         put_df = pd.DataFrame()
 
-    result_df = pd.concat([call_df, put_df], ignore_index=True)
+    result_df = pd.concat([call_df, put_df])
+    result_df = result_df[["Type", "Strike", "OI"]]
 
     logger.info(f"Final top {TOP_N_STRIKES} unique strikes for CALL and PUT extracted")
     return result_df
@@ -327,7 +340,7 @@ def run_scraper():
     oi_data = extract_oi_data(page)
     save_cookies(context)
 
-    if not oi_data.empty:
+    if len(oi_data) > 0:
         logger.info("Extracting gold price from investing.com...")
         future_price = scrape_investing_gold_price(page)
         logger.info(f"Gold price extracted: {future_price}")
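The `extract_oi_data()` change above swaps `groupby(...).agg({"OI": "max"})` plus `nlargest()` for `drop_duplicates(keep="first")`, `sort_values("OI")` and `tail(TOP_N_STRIKES)`. A small, hedged demonstration of that selection on made-up strike rows (values are illustrative only):

```python
# Demo of the unique-strike selection used above, on made-up rows.
# keep="first" retains the first occurrence of a duplicate strike, so rows
# scraped earlier win over later duplicates of the same strike.
import pandas as pd

TOP_N_STRIKES = 3
call_levels = [
    {"Strike": 4350.0, "OI": 229137},
    {"Strike": 4350.0, "OI": 150000},   # duplicate strike, dropped
    {"Strike": 4345.0, "OI": 155398},
    {"Strike": 4360.0, "OI": 90649},
    {"Strike": 4370.0, "OI": 12000},
]

call_df = pd.DataFrame(call_levels)
call_df = call_df.drop_duplicates(subset="Strike", keep="first")
call_df = call_df.sort_values("OI")
call_df = call_df.tail(TOP_N_STRIKES)   # the N unique strikes with the highest OI
call_df["Type"] = "CALL"
print(call_df[["Type", "Strike", "OI"]])
```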
oi_scraper/run_scheduled.bat (new file, 13 lines)
@@ -0,0 +1,13 @@
@echo off
REM ==========================================
REM CME OI Scraper - Scheduled Task Version
REM For use with Windows Task Scheduler
REM ==========================================

REM Navigate to script directory
cd /d %~dp0

REM Activate virtual environment and run scraper (no pause)
call venv\Scripts\activate.bat
python main.py
exit %ERRORLEVEL%
@@ -1,47 +1,21 @@
 @echo off
 REM ==========================================
-REM CME OI Scraper - Automatic Daily Runner
+REM CME OI Scraper - Run with Virtual Environment
 REM ==========================================
 
-REM Change to script directory
-cd /d C:\Users\YourUsername\Gitea\MeanRevisionEA\oi_scraper
+REM Navigate to script directory
+cd /d %~dp0
 
 echo ==========================================
-echo CME OI Scraper - Daily Update
+echo CME OI Scraper
 echo ==========================================
-echo Started at: %date% %time%
-echo ========================================== >> scraper.log
+
+REM Activate virtual environment
+call venv\Scripts\activate.bat
 
 REM Run Python scraper
-python main.py >> scraper.log 2>&1
+python main.py
 
-REM Check if scraper succeeded
-if %ERRORLEVEL% EQU 0 (
-    echo [%date% %time%] Scraper completed successfully >> scraper.log
-
-    REM Check if CSV file was created
-    if exist oi_data.csv (
-        echo [%date% %time%] CSV file created successfully >> scraper.log
-
-        REM Copy to MetaTrader 5 Files directory
-        REM Update this path to your actual MT5 directory
-        copy oi_data.csv "C:\Users\YourUsername\AppData\Roaming\MetaQuotes\Terminal\[Your_Terminal_ID]\MQL5\Files\oi_data.csv"
-
-        if %ERRORLEVEL% EQU 0 (
-            echo [%date% %time%] CSV copied to MT5 Files directory >> scraper.log
-        ) else (
-            echo [%date% %time%] ERROR: Failed to copy CSV to MT5 directory >> scraper.log
-        )
-    ) else (
-        echo [%date% %time%] WARNING: oi_data.csv not found >> scraper.log
-    )
-) else (
-    echo [%date% %time%] ERROR: Scraper failed with error code %ERRORLEVEL% >> scraper.log
-)
-
-echo ==========================================
-echo Completed at: %date% %time%
-echo ==========================================
-
-REM Keep window open for 5 seconds to see any errors
+REM Pause for 5 seconds if running manually (not scheduled)
+if "%1"=="--scheduled" goto :eof
 timeout /t 5
oi_scraper/run_with_venv.bat (new file, 21 lines)
@@ -0,0 +1,21 @@
@echo off
REM ==========================================
REM CME OI Scraper - Manual Run with Virtual Environment
REM ==========================================

REM Navigate to script directory
cd /d %~dp0

echo ==========================================
echo CME OI Scraper - Manual Run
echo ==========================================

REM Activate virtual environment
call venv\Scripts\activate.bat

REM Run Python scraper
python main.py

echo.
echo Scraper completed. Check oi_data.csv for results.
timeout /t 5
oi_scraper/setup_env.bat (new file, 34 lines)
@@ -0,0 +1,34 @@
@echo off
REM ==========================================
REM CME OI Scraper - Virtual Environment Setup
REM ==========================================

echo ==========================================
echo Setting up Python Virtual Environment
echo ==========================================

REM Navigate to script directory
cd /d %~dp0

REM Create virtual environment
echo Creating virtual environment...
py -3 -m venv venv

REM Activate virtual environment and install dependencies
echo Installing dependencies...
call venv\Scripts\activate.bat
pip install --upgrade pip
pip install -r requirements.txt

REM Install playwright browser
echo Installing Playwright browser...
python -m playwright install chromium

echo ==========================================
echo Setup Complete!
echo ==========================================
echo.
echo To run the scraper, use: run_with_venv.bat
echo.

pause
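The simplified `run_scraper.bat` no longer copies `oi_data.csv` into the MetaTrader `MQL5\Files` folder, a step the removed Windows guide handled in batch. If that copy is still wanted alongside the new batch files, one option is a small Python wrapper; the sketch below is hedged — the MT5 path is a placeholder taken from the documentation above and must be adjusted to the actual terminal data folder:

```python
# Hedged sketch: run the scraper and, on success, copy the CSV into the
# MetaTrader Files directory. MT5_FILES_DIR is a placeholder path; adjust it.
import shutil
import subprocess
import sys
from pathlib import Path

SCRAPER_DIR = Path(__file__).resolve().parent
MT5_FILES_DIR = Path(r"C:\Users\YourUsername\AppData\Roaming\MetaQuotes"
                     r"\Terminal\Common\MQL5\Files")

result = subprocess.run([sys.executable, "main.py"], cwd=SCRAPER_DIR)
csv_path = SCRAPER_DIR / "oi_data.csv"

if result.returncode == 0 and csv_path.exists():
    shutil.copy2(csv_path, MT5_FILES_DIR / "oi_data.csv")
    print("oi_data.csv copied to MT5 Files directory")
else:
    sys.exit(f"scraper failed with exit code {result.returncode}")
```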