diff --git a/.eslintrc.json b/.eslintrc.json index d1e7e66269..5f99484858 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -59,6 +59,10 @@ "countMarkers": true, "skel": true, "setupPokemonMarker": true, - "updatePokemonMarker": true + "updatePokemonMarker": true, + + "pokemonLabel": true, + "updatePokemonLabel": true, + "updatePokemonLabels": true } } diff --git a/.gitignore b/.gitignore index 25d0ee9ace..45d3fb9159 100644 --- a/.gitignore +++ b/.gitignore @@ -35,4 +35,7 @@ static/js/custom.js !contrib/supervisord/install-reinstall.sh !static/data/* !static/icons/hsl*.png -!static/app/settings.json \ No newline at end of file +!static/app/settings.json +docs/basic-install/mysql.md +config/config.ini.example +docs/first-run/commandline.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..80729c23d4 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at thunderfox@rocketmap.org. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/README.md b/README.md index d178221fc7..a243b81968 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Live visualization of all the Pokémon (with option to show gyms, raids and Pok * Filters * Independent worker threads (many can be used simultaneously to quickly generate a livemap of a huge geographical area) * Localization (en, fr, pt_br, de, ru, ko, ja, zh_tw, zh_cn, zh_hk) -* DB storage (sqlite or mysql) of all found Pokémon +* DB storage (mysql) of all found Pokémon * Incredibly fast, efficient searching algorithm (compared to everything else available) ## Information diff --git a/config/config.ini.example b/config/config.ini.example index 8495956a7c..b6cf06ad21 100644 --- a/config/config.ini.example +++ b/config/config.ini.example @@ -25,7 +25,7 @@ #speed-scan # Use speed-scan as the search scheduler. #location: # Location, can be an address or coordinates. #step-limit: # Steps (default=10) -#scan-delay: # Time delay between requests in scan threads. 
(default=12) +#scan-delay: # Time delay between requests in scan threads. (default=10) #no-gyms # Disables gym scanning (default=False) #no-pokemon # Disables pokemon scanning. (default=False) #no-pokestops # Disables pokestop scanning. (default=False) @@ -39,8 +39,6 @@ # Database settings ################### -#db-type: sqlite # sqlite (default) or mysql -#db: # Database filename for SQLite. (default='pogom.db') #db-host: # Required for mysql () #db-name: # Required for mysql #db-user: # Required for mysql @@ -79,7 +77,7 @@ #workers-per-hive: # Only referenced when using --beehive. Sets number of workers per hive. (default=1) #workers: # Number of search worker threads to start. (default=#accounts) #spawn-delay: # Number of seconds after spawn time to wait before scanning to be sure the Pokemon is there. (default=10) -#kph: # Set a maximum speed in km/hour for scanner movement. (default=35) +#kph: # Set a maximum speed in km/hour for scanner movement. 0 to disable. (default=35) #bad-scan-retry: # Number of bad scans before giving up on a step. (default=2, 0 to disable) #skip-empty # Enables skipping of empty cells in normal scans - requires previously populated database. (not to be used with -ss) #min-seconds-left: # Time that must be left on a spawn before considering it too late and skipping it. (default=0) @@ -109,7 +107,7 @@ #encounter-delay: # Delay in seconds before starting an encounter. Must not be zero. (default=1) #high-lvl-accounts: # File containing a list high level accounts, in the format "auth_service,username,password" #enc-whitelist-file: # File containing a list of Pokemon IDs to encounter for IV/CPs. Requires L30 or higher accounts in --high-lvl-accounts. -#hlvl-kph: # Set a maximum speed in km/hour for high level account scanning. (default=25) +#hlvl-kph: # Set a maximum speed in km/hour for high level account scanning. 0 to disable. 
(default=25) # Webserver settings @@ -152,7 +150,8 @@ #wh-types:[] # List of events to be sent: pokemon, gym, raid, egg, tth, gym-info, pokestop, lure, captcha. (default= nothing) #wh-threads: # Number of webhook threads; increase if the webhook queue falls behind. (default=1) #wh-retries: # Number of times to retry sending webhook data on failure (default=5) -#wh-timeout: # Timeout (in seconds) for webhook requests (default=1). +#wh-connect-timeout: # Connect timeout (in seconds) for webhook requests (default=1). +#wh-read-timeout: # Read timeout (in seconds) for webhook requests (default=1). #wh-concurrency: # Async requests pool size. (default=25) #wh-backoff-factor: # Factor (in seconds) by which the delay until next retry will increase. (default=0.25). #wh-lfu-size: # Webhook LFU cache max size (default=1000). diff --git a/config/shared-config.ini.example b/config/shared-config.ini.example index 34a2c24361..f0231c8cd3 100644 --- a/config/shared-config.ini.example +++ b/config/shared-config.ini.example @@ -14,8 +14,6 @@ # Other shared settings ################### -#db-type: # sqlite (default) or mysql -#db: # Database filename for SQLite. (default='pogom.db') #db-host: # Required for mysql () #db-name: # Required for mysql #db-user: # Required for mysql diff --git a/docs/advanced-install/docker.md b/docs/advanced-install/docker.md index 9e3f319547..757441a0f6 100644 --- a/docs/advanced-install/docker.md +++ b/docs/advanced-install/docker.md @@ -11,9 +11,7 @@ If you are not familiar or don't feel comfortable with Python, pip or any of the ## Introduction -The quickest way to get RocketMap up and running with docker is quite simple. However, given the disposable nature of docker containers, and the fact that the default database for RocketMap is SQLite, your data won't be persistent. In case the container is stopped or crashes, all the collected data will be lost. 
- -If that doesn't bother you, and you just want to give RocketMap a go, keep on reading. If you prefer a persistent setup, skip to "Advanced Docker Setup" +The quickest way to get RocketMap up and running with docker is quite simple. However you need to setup an external mysql database to make it work so be sure to read the tutorial until the "Advanced Docker Setup" ## Simple Docker Setup @@ -166,7 +164,6 @@ docker run -d --name pogomap --net=pogonw -p 5000:5000 \ -k 'your-google-maps-key' \ -l 'lat, lon' \ -st 5 \ - --db-type mysql \ --db-host db \ --db-port 3306 \ --db-name pogodb \ @@ -195,7 +192,6 @@ docker run -d --name pogomap2 --net=pogonw \ -k 'your-google-maps-key' \ -l 'newlat, newlon' \ -st 5 \ - --db-type mysql \ --db-host db \ --db-port 3306 \ --db-name pogodb \ @@ -251,7 +247,6 @@ docker run -d --name pogomap --net=pogonw -p 5000:5000 \ -k 'your-google-maps-key' \ -l 'lat, lon' \ -st 5 \ - --db-type mysql \ --db-host db \ --db-port 3306 \ --db-name pogodb \ diff --git a/docs/basic-install/index.rst b/docs/basic-install/index.rst index 562fbbb3a5..cfcd802b55 100644 --- a/docs/basic-install/index.rst +++ b/docs/basic-install/index.rst @@ -11,7 +11,11 @@ Follow one of the guides below to get the basic prerequisites installed: * :doc:`osx` * :doc:`windows` * :doc:`linux` - + +You will also need a MySQL server installed: + + * :doc:`mysql` + Credentials *********** @@ -140,19 +144,19 @@ Enables captcha solving and 2Captcha API key. (Manual captcha available, see `Fu Things to Know ************** - * You may want to use more than one account to scan with RocketMap. `Here `_ is how to use as many accounts as your heart desires. - * Your accounts need to complete the tutorial before they will be any use to RocketMap! `Here `_ is how do that with RM. - * You might experience your accounts encountering Captchas at some point. `Here `_ is how we handle those. + * You may want to use more than one account to scan with RocketMap. 
`Here `_ is how to use as many accounts as your heart desires. + * Your accounts need to complete the tutorial before they will be any use to RocketMap! `Here `_ is how to do that with RM. + * You might experience your accounts encountering Captchas at some point. `Here `_ is how we handle those. * Due to recent updates, you might experience a shadow ban. `Here `_ is what you need to know. - * All of these flags can be set inside of a configuration file to avoid clutter in the command line. Go `here `_ to see how. - * A full list of all commands are available `here. `_ + * All of these flags can be set inside of a configuration file to avoid clutter in the command line. Go `here `_ to see how. + * A full list of all commands are available `here. `_ * A few tools to help you along the way are located `here. `_ Updating the Application ************************ -RocketMap is a very active project and updates often. You can follow the `latest changes `_ to see what's changing. +RocketMap is a very active project and updates often. You can follow the `latest changes `_ to see what's changing. You can update with a few quick commands: @@ -162,6 +166,6 @@ pip install -r requirements.txt --upgrade (Prepend sudo -H on Linux) npm run build -Watch the `latest changes `_ on `Discord `_ to know when updating will require commands other than above. - +Watch the `latest changes `_ on `Discord `_ to know when updating will require commands other than above. + **IMPORTANT** Some updates will include database changes that run on first startup. You should run only **one** ``runserver.py`` command until you are certain that the DB has been updated. You will know almost immediately that your DB needs updating if **Detected database version x, updating to x** is printed in the console. This can take a while so please be patient. Once it's done, you can start all your instances like you normally would. 
diff --git a/docs/basic-install/linux.rst b/docs/basic-install/linux.rst index dc6e2bf6a3..fa3f1d86ea 100644 --- a/docs/basic-install/linux.rst +++ b/docs/basic-install/linux.rst @@ -24,7 +24,9 @@ Debian's sources lists are out of date and will not fetch the correct versions o curl -sL https://raw.githubusercontent.com/nodesource/distributions/master/deb/setup_8.x | sudo -E bash - - sudo apt-get install -y build-essential libbz2-dev libsqlite3-dev libreadline-dev libssl-dev libffi-dev zlib1g-dev libncurses5-dev libssl-dev libgdbm-dev python python-dev nodejs + sudo apt-get install -y build-essential libbz2-dev libreadline-dev libssl-dev libffi-dev zlib1g-dev libncurses5-dev libssl-dev libgdbm-dev python python-dev nodejs + + curl -sL https://bootstrap.pypa.io/get-pip.py | sudo python - curl -sL https://bootstrap.pypa.io/get-pip.py | sudo python - diff --git a/docs/extras/mysql.md b/docs/basic-install/mysql.md similarity index 90% rename from docs/extras/mysql.md rename to docs/basic-install/mysql.md index 48d6494deb..5168b34b22 100644 --- a/docs/extras/mysql.md +++ b/docs/basic-install/mysql.md @@ -1,13 +1,12 @@ -# Using a MySQL Server +# Installing MySQL -**This is a guide for windows only currently.** -**Preliminary Linux (Debian) instructions below (VII)** +**This guide is primarily for Windows.** +**However, preliminary Linux (Debian) instructions are below (VII)** **Preliminary Docker (modern Linux OS w/ Docker & git installed) instructions below (VIII)** ## I. Prerequisites -1. Have already ran/operated the RocketMap using the default database setup. -2. Have the "develop" build of RocketMap. [Available here.](https://rocketmap.readthedocs.io/en/develop/basic-install/index.html) -3. Downloaded [MariaDB](https://downloads.mariadb.org/) +1. Have the "develop" build of RocketMap. [Available here.](https://rocketmap.readthedocs.io/en/develop/basic-install/index.html) +2. Downloaded [MariaDB](https://downloads.mariadb.org/) ## II. Installing MariaDB 1. 
Run the install file, for me this was: mariadb-10.1.16-winx64.msi @@ -59,7 +58,6 @@ - Change "username" to your respective username for the selected service. - Change "password" to your respective password for the username on the selected service. - **Database Settings:** This is the important section you will want to modify. - - Change the "db-type" to "mysql" - Change "db-host" to "127.0.0.1" - Change "db-name:" to "rocketmapdb" - Change "db-user:" to "rocketmapuser" @@ -73,14 +71,15 @@ 5. Make sure you've removed all of the `#` from any line with a value you inputted. Indent the comments that are after the values as well, so they are on the following line below the variable they represent. For example: ``` # Database settings - db-type: mysql - # sqlite (default) or mysql + db-name: mydb + # Required ``` 6. Go to File->Save as... and make sure you save this file into the same directory as the "config.ini.example", but obviously save it as "config.ini". Make sure it's saved as a .ini file type, and not anything else or it won't work. 7. You're now done configuring your config.ini file. -## V. Run it! +MySQL is now installed, return to the main install guide. If you've encountered any errors it's most likely due to missing a parameter you commented out when you call runserver.py or you mis-typed something in your `config.ini`. However, if it's neither of those issues and something not covered in this guide hop into the RocketMap discord server, and go to the help channel. People there are great, and gladly assist people with troubleshooting issues. +<<<<<<< HEAD:docs/extras/mysql.md Now that we have our server setup and our config.ini filled out it's time to actually run the workers to make sure everything is in check. Remember from above if you commented out any parameters in the util.py file that all of those parameters need to be met and filled out when you run the runserver.py script. 
In our case we commented out location, and steps so we could individually choose where each worker scanned, and the size of the scan. I've put two code snippets below, one would be used if you didn't comment out anything and instead filled out the **[Search_Settings]** in section IV step 4 above. The other code snippet is what you would run if you commented out the same lines as I did in our running example. ``` @@ -96,6 +95,8 @@ python runserver.py -st 10 -l "[LOCATION]" You should now be up and running. If you've encountered any errors it's most likely due to missing a parameter you commented out when you call runserver.py or you mis-typed something in your `config.ini`. However, if it's neither of those issues and something not covered in this guide hop into the RocketMap discord server, and go to the help channel. People there are great, and gladly assist people with troubleshooting issues. +======= +>>>>>>> origin/develop:docs/basic-install/mysql.md ## Set up MySQL on a second computer, separate from RM instances. In this example, computer running RocketMap will be `Server A` while computer running MySQL will be `Server B`. @@ -120,7 +121,6 @@ You need to tell RocketMap where the database is! ``` **Database Settings:** This is the important section you will want to modify. - - Change the "db-type" to "mysql" - Change "db-host" to [IP ADDRESS OF SERVER B] - Change "db-name:" to "rocketmapdb" - Change "db-user:" to "rocketmapuser" @@ -151,11 +151,10 @@
``` # Database settings - db-type: mysql # sqlite (default) or mysql - db-host: 127.0.0.1 # required for mysql - db-name: rocketmapdb # required for mysql - db-user: rocketmapuser # required for mysql - db-pass: YourPW # required for mysql + db-host: 127.0.0.1 + db-name: rocketmapdb # required + db-user: rocketmapuser # required + db-pass: YourPW # required ``` ## Docker Settings w/ Let's Encrypt @@ -172,7 +171,7 @@ _only pain comes from mysql5.7 and beyond_ ``` docker run --name mainmap -d --link pokesql pokemap --auth-service=ptc \ - --username=youruser --password=yourpassword --db-type=mysql --db-host=pokesql \ + --username=youruser --password=yourpassword --db-host=pokesql \ --db-name=pokemap --db-user=root --db-pass=some-string --gmaps-key=someapikey ``` @@ -183,7 +182,7 @@ _OPTIONAL: always scan Austin, TX (SQL benchmark?)_ ``` docker run --name scanagent -d --link pokesql pokemap --no-server \ --auth-service=ptc --location="Austin, TX" --username=yourotheruser \ - --password=yourotherpassword --db-type=mysql --db-host=pokemap \ + --password=yourotherpassword --db-host=pokemap \ --db-name=pokemap --db-user=root --db-pass=some-string \ --gmaps-key=some-api-key ``` diff --git a/docs/common-issues/faq.md b/docs/common-issues/faq.md index 75b8a113e3..5e5e0566bc 100644 --- a/docs/common-issues/faq.md +++ b/docs/common-issues/faq.md @@ -28,7 +28,7 @@ If you happen to have 2-step verification enabled for your Google account you wi SpeedScan (`-speed`) is the most used scheduler: it's the only scheduler that currently supports finding the proper spawnpoint time and duration, and it also features a built-in speed limiter to avoid speed violations (i.e. softbans). 
-More information can be found here : [Speed Scheduler](http://rocketmap.readthedocs.io/en/develop/scanning-method/Speed-Scheduler.html) +More information can be found here : [Speed Scheduler](http://rocketmap.readthedocs.io/en/develop/scanning-method/speed-scheduler.html) ### But I was happy using the default Hex or -ss... @@ -69,14 +69,6 @@ For a rough guide you can use the formulas at the bottom of this page. Seb deleted it, it was the only good thing left in our lives. Seb has murdered us all. -### I have problems with my database because...... - -RocketMap uses SQLite which doesn't support real concurrency, so you're limited directly by the read/write speed of your drive and you're hoping that nothing happens concurrently (otherwise it breaks). - -Higher threads or extra workers = increased odds of SQLite locking up. sqlite also has a very low limit of number of variables that can be used in a single query, which breaks support for medium or large sized maps. - -You need [MySQL](http://rocketmap.readthedocs.io/en/develop/extras/mysql.html) if you want a proper database. - ### How do I setup port forwarding? [See this helpful guide](http://rocketmap.readthedocs.io/en/develop/extras/external.html) @@ -155,14 +147,6 @@ InternalError(1054, u"unknown column 'cp' in 'field list'") or similar Only one instance can run when the database is being modified or upgraded. Run ***ONE*** instance of RM with `-cd` to wipe your database, then run ***ONE*** instance of RM (without `-cd`) to setup your database. -#### SQLite query limit - -``` -OperationalError: too many SQL variables -``` - -Due to SQLite supporting only a small amount of variables in a single query, you will need to use MySQL as you are above said limit. This is typically due to the adding of more workers/area to your map. 
- #### Certificate errors ``` diff --git a/docs/extras/spawnpoint-scanning-scheduler.md b/docs/extras/spawnpoint-scanning-scheduler.md index 456b282562..6cdfb2d7ee 100644 --- a/docs/extras/spawnpoint-scanning-scheduler.md +++ b/docs/extras/spawnpoint-scanning-scheduler.md @@ -1,47 +1,41 @@ # Spawnpoint Scanning Scheduler -If you already have a large number of known spawnpoints it may be worth looking into Spawnpoint Scanning +If you already have a 100% completed Initial Scan, it may be worth looking into Spawnpoint Scanning. -Spawnpoint Scanning consists of only scanning an area in which a spawn has recently happened, this saves a large number of requests and also detects all spawns soon after they appear instead of whenever the scan gets round to them again +Spawnpoint Scanning consists of only scanning an area in which a spawn has recently happened, this saves a large number of requests and also detects all spawns soon after they appear instead of whenever the scan gets round to them again. -Spawnpoint Scanning is particularly useful in areas where spawns are spread out +Spawnpoint Scanning is particularly useful in areas where spawns are spread out. -## Spawnpoint Scanning can be run in one of three different modes: +## Spawnpoint Scanning can be run in one of two different modes: -### Scans based on database +### Scans without Spawnpoint Clustering ``` python runserver.py -ss -l YOURLOCATION -st STEPS ``` -Where YOURLOCATION is the location the map should be centered at and also the center of the hex to get spawn locations from, -st sets the size of the clipping hexagon (hexagon is the same size as the scan of the same -st value) +Where YOURLOCATION is the location the map should be centered at and also the center of the hex to get spawn locations from, -st sets the size of the clipping hexagon (hexagon is the same size as the scan of the same -st value). 
-This is particularly useful for when using a beehive +This is particularly useful for when using a beehive. -Note: when using the mode when not in a beehive, it is recommended to use an -st value one higher than the scan was done on, to avoid very edge spawns being clipped off +Note: When using the mode when not in a beehive, it is recommended to use an -st value one higher than the scan was done on, to avoid very edge spawns being clipped off. -### Dump scans from database then use the created file +### Scans with Spawnpoint Clustering ``` -python runserver.py -ss YOURFILE.json -l YOURLOCATION -st STEPS --dump-spawnpoints +python runserver.py -ss -ssct YOURVALUE -l YOURLOCATION -st STEPS ``` -Where YOURFILE.json is the file containing all the spawns, YOURLOCATION is the location the map should be centered at and also the center of the hex to get spawn locations from and -st sets the size of the clipping hexagon (hexagon is the same size as the scan of the same -st value) +Where YOURLOCATION is the location the map should be centered at and also the center of the hex to get spawn locations from, -st sets the size of the clipping hexagon (hexagon is the same size as the scan of the same -st value), -ssct (Spawnpoint Cluster Time) sets a Time threshold (in seconds) for spawn point clustering. +A Value around 200 seconds is recommended. +Spawnpoint Clustering can help to reduce requests and also your worker count because its compressing several Spawnpoints into a cluster. Cluster time will try to schedule scans at the same position within -ssct amount of seconds to catch multiple spawns at once. -This mode is mainly used for switching from database mode to spawnFile mode, and can also be used simply for dumping all spawns to file (use a very large -st and close the program once it has created the file) -### Scans based on file +This is particularly useful for when using a beehive. 
-``` -python runserver.py -ss YOURFILE.json -l YOURLOCATION -``` +Note: When using the mode when not in a beehive, it is recommended to use an -st value one higher than the scan was done on, to avoid very edge spawns being clipped off. -Where YOURFILE.json is the file containing all the spawns, and YOURLOCATION is the location the map should be centered at (YOURLOCATION is not used for anything else in this mode) - -Note: in this mode -st does nothing ### Getting spawns -for generating the spawns to use with Spawnpoint Scanning it is recommended to scan the area with a scan that completes in 10 minutes for at least 1 hour, this should guarantee that all spawns are found - -spawn files can also be generated with an external tool such as spawnScan +For generating the spawns to use with Spawnpoint Scanning it is recommended to scan the area with -speed until the initial scan reaches 100%. diff --git a/docs/extras/webhooks.md b/docs/extras/webhooks.md index 5daba0f7f5..5bb1eb3172 100644 --- a/docs/extras/webhooks.md +++ b/docs/extras/webhooks.md @@ -379,6 +379,7 @@ Note: `raid` events use the same event type and fields as `egg` events, but the | Field | Details | Example | | ------------ | ----------------------------------------------------------------- | ------------ | | `gym_id` | The gym's unique ID | `"NGY2ZjBjY2Y3OTUyNGQyZW`
`FlMjc3ODkzODM2YmI1Y2YuMTY="` | +| `team_id` | The team that currently controls the gym1 | `1` | | `latitude` | The gym's latitude | `43.599321` | | `longitude` | The gym's longitude | `5.181415` | | `spawn` | The time at which the raid spawned | `1500992342` | @@ -390,6 +391,15 @@ Note: `raid` events use the same event type and fields as `egg` events, but the | `move_1` | The raid boss's quick move | `274` | | `move_2` | The raid boss's charge move | `275` | +1. The teams are represented by the values: + +| Value | Team | +| ------- | ----------- | +| `0` | Uncontested | +| `1` | Mystic | +| `2` | Valor | +| `3` | Instinct | + ### `tth` A `tth` event is sent whenever a scan instance's TTH completion status changes by more than 1%. diff --git a/docs/first-run/commandline.md b/docs/first-run/commandline.md index 94ebe78943..6809e2de63 100644 --- a/docs/first-run/commandline.md +++ b/docs/first-run/commandline.md @@ -1,67 +1,67 @@ # Command Line - usage: runserver.py [-h] [-cf CONFIG] [-a AUTH_SERVICE] [-u USERNAME] - [-p PASSWORD] [-w WORKERS] [-asi ACCOUNT_SEARCH_INTERVAL] + usage: runserver.py [-h] [-cf CONFIG] [-scf SHARED_CONFIG] [-a AUTH_SERVICE] + [-u USERNAME] [-p PASSWORD] [-w WORKERS] + [-asi ACCOUNT_SEARCH_INTERVAL] [-ari ACCOUNT_REST_INTERVAL] [-ac ACCOUNTCSV] [-hlvl HIGH_LVL_ACCOUNTS] [-bh] [-wph WORKERS_PER_HIVE] [-l LOCATION] [-alt ALTITUDE] [-altv ALTITUDE_VARIANCE] - [-uac] [-nj] [-al] [-st STEP_LIMIT] [-gf GEOFENCE_FILE] + [-uac] [-j] [-al] [-st STEP_LIMIT] [-gf GEOFENCE_FILE] [-gef GEOFENCE_EXCLUDED_FILE] [-sd SCAN_DELAY] [--spawn-delay SPAWN_DELAY] [-enc] [-cs] [-ck CAPTCHA_KEY] [-cds CAPTCHA_DSK] [-mcd MANUAL_CAPTCHA_DOMAIN] [-mcr MANUAL_CAPTCHA_REFRESH] [-mct MANUAL_CAPTCHA_TIMEOUT] [-ed ENCOUNTER_DELAY] [-ignf IGNORELIST_FILE] [-encwf ENC_WHITELIST_FILE] +<<<<<<< HEAD [-nostore] [-wwhtf WEBHOOK_WHITELIST_FILE] +======= + [-nostore] [-apir API_RETRIES] + [-wwht WEBHOOK_WHITELIST | -wblk WEBHOOK_BLACKLIST | -wwhtf WEBHOOK_WHITELIST_FILE | -wblkf 
WEBHOOK_BLACKLIST_FILE] +>>>>>>> origin/develop [-ld LOGIN_DELAY] [-lr LOGIN_RETRIES] [-mf MAX_FAILURES] [-me MAX_EMPTY] [-bsr BAD_SCAN_RETRY] [-msl MIN_SECONDS_LEFT] [-dc] [-H HOST] [-P PORT] [-L LOCALE] [-c] [-m MOCK] [-ns] [-os] [-sc] [-nfl] -k - GMAPS_KEY [--skip-empty] [-C] [-D DB] [-cd] [-np] [-ng] - [-nr] [-nk] [-ss [SPAWNPOINT_SCANNING]] [-speed] [-spin] + GMAPS_KEY [--skip-empty] [-C] [-cd] [-np] [-ng] [-nr] + [-nk] [-ss] [-ssct SS_CLUSTER_TIME] [-speed] [-spin] [-ams ACCOUNT_MAX_SPINS] [-kph KPH] [-hkph HLVL_KPH] - [-ldur LURE_DURATION] [--dump-spawnpoints] - [-pd PURGE_DATA] [-px PROXY] [-pxsc] + [-ldur LURE_DURATION] [-pd PURGE_DATA] [-px PROXY] [-pxsc] [-pxt PROXY_TEST_TIMEOUT] [-pxre PROXY_TEST_RETRIES] [-pxbf PROXY_TEST_BACKOFF_FACTOR] [-pxc PROXY_TEST_CONCURRENCY] [-pxd PROXY_DISPLAY] [-pxf PROXY_FILE] [-pxr PROXY_REFRESH] - [-pxo PROXY_ROTATION] [--db-type DB_TYPE] - [--db-name DB_NAME] [--db-user DB_USER] - [--db-pass DB_PASS] [--db-host DB_HOST] - [--db-port DB_PORT] - [--db-threads DB_THREADS] [-wh WEBHOOKS] [-gi] - [--enable-clean] + [-pxo PROXY_ROTATION] --db-name DB_NAME --db-user DB_USER + --db-pass DB_PASS [--db-host DB_HOST] [--db-port DB_PORT] + [--db-threads DB_THREADS] [-wh WEBHOOKS] [-gi] [-DC] [--wh-types {pokemon,gym,raid,egg,tth,gym-info,pokestop,lure,captcha}] [--wh-threads WH_THREADS] [-whc WH_CONCURRENCY] - [-whr WH_RETRIES] [-wht WH_TIMEOUT] - [-whbf WH_BACKOFF_FACTOR] [-whlfu WH_LFU_SIZE] - [-whfi WH_FRAME_INTERVAL] + [-whr WH_RETRIES] [-whct WH_CONNECT_TIMEOUT] + [-whrt WH_READ_TIMEOUT] [-whbf WH_BACKOFF_FACTOR] + [-whlfu WH_LFU_SIZE] [-whfi WH_FRAME_INTERVAL] [--ssl-certificate SSL_CERTIFICATE] [--ssl-privatekey SSL_PRIVATEKEY] [-ps [logs]] [-slt STATS_LOG_TIMER] [-sn STATUS_NAME] [-spp STATUS_PAGE_PASSWORD] [-hk HASH_KEY] [-novc] - [-vci VERSION_CHECK_INTERVAL] [-el ENCRYPT_LIB] - [-odt ON_DEMAND_TIMEOUT] [--disable-blacklist] - [-tp TRUSTED_PROXIES] [--api-version API_VERSION] - [--no-file-logs] [--log-path LOG_PATH] 
[--dump] - [-v | --verbosity VERBOSE] + [-vci VERSION_CHECK_INTERVAL] [-odt ON_DEMAND_TIMEOUT] + [--disable-blacklist] [-tp TRUSTED_PROXIES] + [--api-version API_VERSION] [--no-file-logs] + [--log-path LOG_PATH] [--dump] [-v | --verbosity VERBOSE] - Args that start with '--' (eg. -a) can also be set in a config file - (/config/config.ini or - specified via -cf). The recognized syntax for setting (key, value) pairs is - based on the INI and YAML formats (e.g. key=value or foo=TRUE). For full - documentation of the differences from the standards please refer to the - ConfigArgParse documentation. If an arg is specified in more than one place, - then commandline values override environment variables which override config - file values which override defaults. +Args that start with '--' (eg. -a) can also be set in a config file +(config/config.ini or specified via +-cf or -scf). Config file syntax allows: key=value, flag=true, stuff=[a,b,c] +(for details, see syntax at https://goo.gl/R74nmi). If an arg is specified in +more than one place, then commandline values override environment variables +which override config file values which override defaults. optional arguments: - -h, --help show this help message and exit [env var: - POGOMAP_HELP] + -h, --help show this help message and exit -cf CONFIG, --config CONFIG Set configuration file + -scf SHARED_CONFIG, --shared-config SHARED_CONFIG + Set a shared config -a AUTH_SERVICE, --auth-service AUTH_SERVICE Auth Services, either one for all accounts or one per account: ptc or google. Defaults all to ptc. [env var: @@ -85,11 +85,11 @@ switched out. [env var: POGOMAP_ACCOUNT_REST_INTERVAL] -ac ACCOUNTCSV, --accountcsv ACCOUNTCSV Load accounts from CSV file containing - "auth_service,username,passwd" lines. [env var: + "auth_service,username,password" lines. 
[env var: POGOMAP_ACCOUNTCSV] -hlvl HIGH_LVL_ACCOUNTS, --high-lvl-accounts HIGH_LVL_ACCOUNTS Load high level accounts from CSV file containing - "auth_service,username,passwd" lines. [env var: + "auth_service,username,password" lines. [env var: POGOMAP_HIGH_LVL_ACCOUNTS] -bh, --beehive Use beehive configuration for multiple accounts, one account per hex. Make sure to keep -st under 5, and -w @@ -112,8 +112,13 @@ Query the Elevation API for each step, rather than only once, and store results in the database. [env var: POGOMAP_USE_ALTITUDE_CACHE] +<<<<<<< HEAD -j, --jitter Apply random -5m to +5m jitter to location. [env var: POGOMAP_JITTER] +======= + -j, --jitter Apply random -5m to +5m jitter to location. [env var: + POGOMAP_JITTER] +>>>>>>> origin/develop -al, --access-logs Write web logs to access.log. [env var: POGOMAP_ACCESS_LOGS] -st STEP_LIMIT, --step-limit STEP_LIMIT @@ -171,6 +176,18 @@ accounts in memory. This will increase the number of logins per account, but decreases memory usage. [env var: POGOMAP_NO_API_STORE] +<<<<<<< HEAD +======= + -apir API_RETRIES, --api-retries API_RETRIES + Number of times to retry an API request. [env var: + POGOMAP_API_RETRIES] + -wwht WEBHOOK_WHITELIST, --webhook-whitelist WEBHOOK_WHITELIST + List of Pokemon to send to webhooks. Specified as + Pokemon ID. [env var: POGOMAP_WEBHOOK_WHITELIST] + -wblk WEBHOOK_BLACKLIST, --webhook-blacklist WEBHOOK_BLACKLIST + List of Pokemon NOT to send to webhooks. Specified as + Pokemon ID. [env var: POGOMAP_WEBHOOK_BLACKLIST] +>>>>>>> origin/develop -wwhtf WEBHOOK_WHITELIST_FILE, --webhook-whitelist-file WEBHOOK_WHITELIST_FILE File containing a list of Pokemon IDs or names to be sent to webhooks. [env var: POGOMAP_WEBHOOK_WHITELIST_FILE] @@ -204,9 +221,8 @@ -H HOST, --host HOST Set web server listening host. [env var: POGOMAP_HOST] -P PORT, --port PORT Set web server listening port. 
[env var: POGOMAP_PORT] -L LOCALE, --locale LOCALE - Locale for Pokemon names (default: en, check - static/dist/locales for more). [env var: - POGOMAP_LOCALE] + Locale for Pokemon names (check static/dist/locales + for more). [env var: POGOMAP_LOCALE] -c, --china Coordinates transformer for China. [env var: POGOMAP_CHINA] -m MOCK, --mock MOCK Mock mode - point to a fpgo endpoint instead of using @@ -230,7 +246,6 @@ requires previously populated database (not to be used with -ss) [env var: POGOMAP_SKIP_EMPTY] -C, --cors Enable CORS on web server. [env var: POGOMAP_CORS] - -D DB, --db DB Database filename for SQLite. [env var: POGOMAP_DB] -cd, --clear-db Deletes the existing database before starting the Webserver. [env var: POGOMAP_CLEAR_DB] -np, --no-pokemon Disables Pokemon from the map (including parsing them @@ -241,10 +256,13 @@ into local db). [env var: POGOMAP_NO_RAIDS] -nk, --no-pokestops Disables PokeStops from the map (including parsing them into local db). [env var: POGOMAP_NO_POKESTOPS] - -ss [SPAWNPOINT_SCANNING], --spawnpoint-scanning [SPAWNPOINT_SCANNING] + -ss, --spawnpoint-scanning Use spawnpoint scanning (instead of hex grid). Scans in a circle based on step_limit when on DB. [env var: POGOMAP_SPAWNPOINT_SCANNING] + -ssct SS_CLUSTER_TIME, --ss-cluster-time SS_CLUSTER_TIME + Time threshold in seconds for spawn point clustering + (0 to disable). [env var: POGOMAP_SS_CLUSTER_TIME] -speed, --speed-scan Use speed scanning to identify spawn points and then scan closest spawns. [env var: POGOMAP_SPEED_SCAN] -spin, --pokestop-spinning @@ -254,17 +272,15 @@ Maximum number of Pokestop spins per hour. [env var: POGOMAP_ACCOUNT_MAX_SPINS] -kph KPH, --kph KPH Set a maximum speed in km/hour for scanner movement. - [env var: POGOMAP_KPH] + 0 to disable. Default: 35. [env var: POGOMAP_KPH] -hkph HLVL_KPH, --hlvl-kph HLVL_KPH Set a maximum speed in km/hour for scanner movement, - for high-level (L30) accounts. 
[env var: - POGOMAP_HLVL_KPH] + for high-level (L30) accounts. 0 to disable. + Default: 25. [env var: POGOMAP_HLVL_KPH] -ldur LURE_DURATION, --lure-duration LURE_DURATION Change duration for lures set on pokestops. This is useful for events that extend lure duration. [env var: POGOMAP_LURE_DURATION] - --dump-spawnpoints Dump the spawnpoints from the db to json (only for use - with -ss). [env var: POGOMAP_DUMP_SPAWNPOINTS] -pd PURGE_DATA, --purge-data PURGE_DATA Clear Pokemon from database this many hours after they disappear (0 to disable). [env var: @@ -303,18 +319,6 @@ Enable proxy rotation with account changing for search threads (none/round/random). [env var: POGOMAP_PROXY_ROTATION] - --db-type DB_TYPE Type of database to be used (default: sqlite). [env - var: POGOMAP_DB_TYPE] - --db-name DB_NAME Name of the database to be used. [env var: - POGOMAP_DB_NAME] - --db-user DB_USER Username for the database. [env var: POGOMAP_DB_USER] - --db-pass DB_PASS Password for the database. [env var: POGOMAP_DB_PASS] - --db-host DB_HOST IP or hostname for the database. [env var: - POGOMAP_DB_HOST] - --db-port DB_PORT Port for the database. [env var: POGOMAP_DB_PORT] - --db-threads DB_THREADS - Number of db threads; increase if the db queue falls - behind. [env var: POGOMAP_DB_THREADS] -wh WEBHOOKS, --webhook WEBHOOKS Define URL(s) to POST webhook information to. [env var: POGOMAP_WEBHOOK] @@ -333,9 +337,12 @@ -whr WH_RETRIES, --wh-retries WH_RETRIES Number of times to retry sending webhook data on failure. [env var: POGOMAP_WH_RETRIES] - -wht WH_TIMEOUT, --wh-timeout WH_TIMEOUT - Timeout (in seconds) for webhook requests. [env var: - POGOMAP_WH_TIMEOUT] + -whct WH_CONNECT_TIMEOUT, --wh-connect-timeout WH_CONNECT_TIMEOUT + Connect timeout (in seconds) for webhook requests. + [env var: POGOMAP_WH_CONNECT_TIMEOUT] + -whrt WH_READ_TIMEOUT, --wh-read-timeout WH_READ_TIMEOUT + Read timeout (in seconds) for webhookrequests. 
[env + var: POGOMAP_WH_READ_TIMEOUT] -whbf WH_BACKOFF_FACTOR, --wh-backoff-factor WH_BACKOFF_FACTOR Factor (in seconds) by which the delay until next retry will increase. [env var: @@ -375,9 +382,6 @@ -vci VERSION_CHECK_INTERVAL, --version-check-interval VERSION_CHECK_INTERVAL Interval to check API version in seconds (Default: in [60, 300]). [env var: POGOMAP_VERSION_CHECK_INTERVAL] - -el ENCRYPT_LIB, --encrypt-lib ENCRYPT_LIB - Path to encrypt lib to be used instead of the shipped - ones. [env var: POGOMAP_ENCRYPT_LIB] -odt ON_DEMAND_TIMEOUT, --on-demand_timeout ON_DEMAND_TIMEOUT Pause searching while web UI is inactive for this timeout (in seconds). [env var: @@ -401,3 +405,15 @@ repeated up to 3 times. --verbosity VERBOSE Show debug messages from RocketMap and pgoapi. [env var: POGOMAP_VERBOSITY] + + Database: + --db-name DB_NAME Name of the database to be used. [env var: + POGOMAP_DB_NAME] + --db-user DB_USER Username for the database. [env var: POGOMAP_DB_USER] + --db-pass DB_PASS Password for the database. [env var: POGOMAP_DB_PASS] + --db-host DB_HOST IP or hostname for the database. [env var: + POGOMAP_DB_HOST] + --db-port DB_PORT Port for the database. [env var: POGOMAP_DB_PORT] + --db-threads DB_THREADS + Number of db threads; increase if the db queue falls + behind. [env var: POGOMAP_DB_THREADS] diff --git a/docs/first-run/tutorial.md b/docs/first-run/tutorial.md index f67bed03a8..1398f5d29c 100644 --- a/docs/first-run/tutorial.md +++ b/docs/first-run/tutorial.md @@ -1,8 +1,15 @@ -# Tutorial Completion -## RocketMap's tutorial completion for new accounts -RocketMap now completes the tutorial steps on all accounts on first log in, there is no more `-tut` argument to complete the tutorial. +# Account leveling +RocketMap completes the tutorial steps on all accounts on first log in and sets a Pokémon as buddy if it does not have one. -It's recommended to enable pokestop spinning in the config to get your accounts to level 2. 
+Accounts with level 1 will get captchas after some time and will stop working unless you setup [catpcha handling](http://rocketmap.readthedocs.io/en/develop/first-run/captchas.html): + + +To avoid this, it's recommended to level accounts at least to level 2. It's as simple as spin a Pokéstop and there are two ways to do so from RM: + + * Enabling pokestop spinning during regular scanning + * Using the level-up tool provided with RM + +## Pokéstop Spinning To enable Pokéstop spinning, add pokestop-spinning to your configuration file, or -spin to your cli parameters. @@ -10,8 +17,26 @@ To enable Pokéstop spinning, add pokestop-spinning to your configuration file, pokestop-spinning ``` -To set the maximum number of Pokéstop spins per account per hour (default: 80), add -ams 30 to your cli parameters or edit your configuration file: +With this setting enabled, RM's scanner instances will try to spin a Pokéstop (50% chance to spin if the account has a level greater than 1) if it's within range and the `--account-max-spins` limit hasn't been reached (default is 20 per account per hour). + +This setting could be enough for some maps with a high density of Pokéstops, as the accounts will get near one soon enough to avoid the captcha, otherwise you will need to enable [catpcha handling](http://rocketmap.readthedocs.io/en/develop/first-run/captchas.html) to keep them working until until there is a Pokéstop within range. + +## Level-up tool + +In the tools folder there is a small python script that will go through the account list, send a map request at a location, and spin all Pokéstops in range (following `account-max-spins` limit). With this tool, you can make sure all accounts are level 2 before using them for scanning. 
+ +The tool uses the same config file and options as RM (the ones that apply) so the setup and run is pretty simple, just change the location to some coordinates that are near 1 or more Pokéstops and change the worker setting to the number of accounts you want to level up simultaneously. + +In the console you will see the initial level of each account, the Pokéstop spinning and the final level. The script will end automatically when all accounts have finished the process or have failed 3 times. + +To run the script, go to RM's root folder and execute: + +``` +python tools/levelup.py +``` + +All command line flags available in RM can be used here too (b(but not all of them will have an effect). So you could increase `account-max-spins` and change location and workers from the command line without needing to modify the config file with something like: ``` -account-max-spins: 30 +python tools/levelup.py -w 30 -l 40.417281,-3.683235 -ams 1000 ``` diff --git a/docs/scanning-method/speed-scheduler.md b/docs/scanning-method/speed-scheduler.md index 13c23682f3..f6a136120e 100644 --- a/docs/scanning-method/speed-scheduler.md +++ b/docs/scanning-method/speed-scheduler.md @@ -4,16 +4,19 @@ Speed Scheduler is an alternative scheduler to Hex Scan or Spawnpoint Scan with ## Features -* Limit speed according to default of 35 kph or by setting -kph +* Limit regular scanning speed according to default of 35 km/h or by setting -kph. 0 to disable. +* Limit high-level account encounter speed according to default of 25 km/h or by setting --hlvl-kph. 0 to disable. * Do an initial scan of the full area, then automatically switch to tracking down the exact spawn time (finding the TTH) and only scan for spawns (an -ss like behaviour). * Add spawn point type identification of the three current types of spawn points -- 15, 30, and 60 minute spawns. 
-* Change spawn point scans to correct spawn time according to spawnpoint type -* Add scans to complete identification for partially identified spawn points -* Dynamically identify and check duration of new spawn points without requiring return to Hex scanning -* Identify spawn points that have been removed and stop scanning them +* Change spawn point scans to correct spawn time according to spawnpoint type. +* Add scans to complete identification for partially identified spawn points. +* Dynamically identify and check duration of new spawn points without requiring return to Hex scanning. +* Identify spawn points that have been removed and stop scanning them. To use Speed Scheduler, always put -speed in the command line or set `speed-scan` in your config file. +Speed Scheduler is optimized for scanning for Pokémon so it doesn't work well for gym/pokéstop only scanning. If you are interested in scanning for just gyms/pokéstops consider using Hex Scheduler. + ## Commands and configs What command line args should I use for Speed Scheduler? diff --git a/pogom/account.py b/pogom/account.py index 61ed6e782b..210089169c 100644 --- a/pogom/account.py +++ b/pogom/account.py @@ -106,7 +106,388 @@ def setup_mrmime_account(args, status, account): 'enable proxy rotation.', status['proxy_url']) +<<<<<<< HEAD return pgacc +======= + return api + + +# Use API to check the login status, and retry the login if possible. +def check_login(args, account, api, proxy_url): + # Logged in? Enough time left? Cool! + if api._auth_provider and api._auth_provider._access_token: + remaining_time = api._auth_provider._access_token_expiry - time.time() + + if remaining_time > 60: + log.debug( + 'Credentials remain valid for another %f seconds.', + remaining_time) + return + + # Try to login. Repeat a few times, but don't get stuck here. + num_tries = 0 + + # One initial try + login_retries. 
+ while num_tries < (args.login_retries + 1): + try: + if proxy_url: + api.set_authentication( + provider=account['auth_service'], + username=account['username'], + password=account['password'], + proxy_config={'http': proxy_url, 'https': proxy_url}) + else: + api.set_authentication( + provider=account['auth_service'], + username=account['username'], + password=account['password']) + # Success! + break + except AuthException: + num_tries += 1 + log.error( + ('Failed to login to Pokemon Go with account %s. ' + + 'Trying again in %g seconds.'), + account['username'], args.login_delay) + time.sleep(args.login_delay) + + if num_tries > args.login_retries: + log.error( + ('Failed to login to Pokemon Go with account %s in ' + + '%d tries. Giving up.'), + account['username'], num_tries) + raise TooManyLoginAttempts('Exceeded login attempts.') + + time.sleep(random.uniform(2, 4)) + + # Simulate login sequence. + rpc_login_sequence(args, api, account) + + +# Simulate real app via login sequence. +def rpc_login_sequence(args, api, account): + total_req = 0 + app_version = PGoApi.get_api_version() + + # 1 - Make an empty request to mimick real app behavior. + log.debug('Starting RPC login sequence...') + + try: + req = api.create_request() + req.call(False) + + total_req += 1 + time.sleep(random.uniform(.43, .97)) + except Exception as e: + log.exception('Login for account %s failed.' + + ' Exception in call request: %s.', + account['username'], + e) + raise LoginSequenceFail('Failed during empty request in login' + + ' sequence for account {}.'.format( + account['username'])) + + # 2 - Get player information. 
+ log.debug('Fetching player information...') + + try: + req = api.create_request() + req.get_player(player_locale=args.player_locale) + resp = req.call(False) + parse_get_player(account, resp) + + total_req += 1 + time.sleep(random.uniform(.53, 1.1)) + if account['warning']: + log.warning('Account %s has received a warning.', + account['username']) + except Exception as e: + log.exception('Login for account %s failed. Exception in ' + + 'player request: %s.', + account['username'], + e) + raise LoginSequenceFail('Failed while retrieving player information in' + + ' login sequence for account {}.'.format( + account['username'])) + + # 3 - Get remote config version. + log.debug('Downloading remote config version...') + old_config = account.get('remote_config', {}) + + try: + req = api.create_request() + req.download_remote_config_version( + platform=1, + device_model=api.device_info['device_model_boot'], + app_version=app_version) + send_generic_request( + req, account, settings=True, buddy=False, inbox=False) + + total_req += 1 + time.sleep(random.uniform(.53, 1.1)) + except Exception as e: + log.exception('Error while downloading remote config: %s.', e) + raise LoginSequenceFail('Failed while getting remote config version in' + + ' login sequence for account {}.'.format( + account['username'])) + + # 4 - Get asset digest. 
+ log.debug('Fetching asset digest...') + config = account.get('remote_config', {}) + + if config.get('asset_time', 0) > old_config.get('asset_time', 0): + i = random.randint(0, 3) + req_count = 0 + result = 2 + page_offset = 0 + page_timestamp = 0 + + time.sleep(random.uniform(.7, 1.2)) + + while result == 2: + req = api.create_request() + req.get_asset_digest( + platform=1, + app_version=app_version, + paginate=True, + page_offset=page_offset, + page_timestamp=page_timestamp) + resp = send_generic_request(req, account, settings=True, + buddy=False, inbox=False) + + req_count += 1 + total_req += 1 + + if i > 2: + time.sleep(random.uniform(1.4, 1.6)) + i = 0 + else: + i += 1 + time.sleep(random.uniform(.3, .5)) + + try: + # Re-use variable name. Also helps GC. + resp = resp['responses']['GET_ASSET_DIGEST'] + except KeyError: + break + + result = resp.result + page_offset = resp.page_offset + page_timestamp = resp.timestamp_ms + log.debug('Completed %d requests to get asset digest.', + req_count) + + # 5 - Get item templates. + log.debug('Fetching item templates...') + + if config.get('template_time', 0) > old_config.get('template_time', 0): + i = random.randint(0, 3) + req_count = 0 + result = 2 + page_offset = 0 + page_timestamp = 0 + + while result == 2: + req = api.create_request() + req.download_item_templates( + paginate=True, + page_offset=page_offset, + page_timestamp=page_timestamp) + resp = send_generic_request(req, account, settings=True, + buddy=False, inbox=False) + + req_count += 1 + total_req += 1 + + if i > 2: + time.sleep(random.uniform(1.4, 1.6)) + i = 0 + else: + i += 1 + time.sleep(random.uniform(.25, .5)) + + try: + # Re-use variable name. Also helps GC. + resp = resp['responses']['DOWNLOAD_ITEM_TEMPLATES'] + except KeyError: + break + + result = resp.result + page_offset = resp.page_offset + page_timestamp = resp.timestamp_ms + log.debug('Completed %d requests to download' + + ' item templates.', req_count) + + # Check tutorial completion. 
+ if not all(x in account['tutorials'] for x in (0, 1, 3, 4, 7)): + log.info('Completing tutorial steps for %s.', account['username']) + complete_tutorial(args, api, account) + else: + log.debug('Account %s already did the tutorials.', account['username']) + # 6 - Get player profile. + log.debug('Fetching player profile...') + try: + req = api.create_request() + req.get_player_profile() + send_generic_request(req, account, settings=True, inbox=False) + total_req += 1 + time.sleep(random.uniform(.2, .3)) + except Exception as e: + log.exception('Login for account %s failed. Exception occurred ' + + 'while fetching player profile: %s.', + account['username'], + e) + raise LoginSequenceFail('Failed while getting player profile in' + + ' login sequence for account {}.'.format( + account['username'])) + + log.debug('Retrieving Store Items...') + try: # 7 - Make an empty request to retrieve store items. + req = api.create_request() + req.get_store_items() + req.call(False) + + total_req += 1 + time.sleep(random.uniform(.6, 1.1)) + except Exception as e: + log.exception('Login for account %s failed. Exception in ' + + 'retrieving Store Items: %s.', account['username'], + e) + raise LoginSequenceFail('Failed during login sequence.') + + # 8 - Check if there are level up rewards to claim. + log.debug('Checking if there are level up rewards to claim...') + + try: + req = api.create_request() + req.level_up_rewards(level=account['level']) + send_generic_request(req, account, settings=True) + + total_req += 1 + time.sleep(random.uniform(.45, .7)) + except Exception as e: + log.exception('Login for account %s failed. 
Exception occurred ' + + 'while fetching level-up rewards: %s.', + account['username'], + e) + raise LoginSequenceFail('Failed while getting level-up rewards in' + + ' login sequence for account {}.'.format( + account['username'])) + + log.info('RPC login sequence for account %s successful with %s requests.', + account['username'], + total_req) + + time.sleep(random.uniform(3, 5)) + + if account['buddy'] == 0 and len(account['pokemons']) > 0: + poke_id = random.choice(account['pokemons'].keys()) + req = api.create_request() + req.set_buddy_pokemon(pokemon_id=poke_id) + log.debug('Setting buddy pokemon for %s.', account['username']) + send_generic_request(req, account) + + time.sleep(random.uniform(10, 20)) + + +# Complete minimal tutorial steps. +# API argument needs to be a logged in API instance. +# TODO: Check if game client bundles these requests, or does them separately. +def complete_tutorial(args, api, account): + tutorial_state = account['tutorials'] + if 0 not in tutorial_state: + time.sleep(random.uniform(1, 5)) + req = api.create_request() + req.mark_tutorial_complete(tutorials_completed=0) + log.debug('Sending 0 tutorials_completed for %s.', account['username']) + send_generic_request(req, account, buddy=False, inbox=False) + + time.sleep(random.uniform(0.5, 0.6)) + req = api.create_request() + req.get_player(player_locale=args.player_locale) + send_generic_request(req, account, buddy=False, inbox=False) + + if 1 not in tutorial_state: + time.sleep(random.uniform(5, 12)) + req = api.create_request() + req.set_avatar(player_avatar={ + 'hair': random.randint(1, 5), + 'shirt': random.randint(1, 3), + 'pants': random.randint(1, 2), + 'shoes': random.randint(1, 6), + 'avatar': random.randint(0, 1), + 'eyes': random.randint(1, 4), + 'backpack': random.randint(1, 5) + }) + log.debug('Sending set random player character request for %s.', + account['username']) + send_generic_request(req, account, buddy=False, inbox=False) + + time.sleep(random.uniform(0.3, 
0.5)) + req = api.create_request() + req.mark_tutorial_complete(tutorials_completed=1) + log.debug('Sending 1 tutorials_completed for %s.', account['username']) + send_generic_request(req, account, buddy=False, inbox=False) + + time.sleep(random.uniform(0.5, 0.6)) + req = api.create_request() + req.get_player_profile() + log.debug('Fetching player profile for %s...', account['username']) + send_generic_request(req, account, inbox=False) + + if 3 not in tutorial_state: + time.sleep(random.uniform(1, 1.5)) + req = api.create_request() + req.get_download_urls(asset_id=[ + '1a3c2816-65fa-4b97-90eb-0b301c064b7a/1477084786906000', + 'aa8f7687-a022-4773-b900-3a8c170e9aea/1477084794890000', + 'e89109b0-9a54-40fe-8431-12f7826c8194/1477084802881000']) + log.debug('Grabbing some game assets.') + send_generic_request(req, account, inbox=False) + + time.sleep(random.uniform(6, 13)) + req = api.create_request() + starter = random.choice((1, 4, 7)) + req.encounter_tutorial_complete(pokemon_id=starter) + log.debug('Catching the starter for %s.', account['username']) + send_generic_request(req, account, inbox=False) + + time.sleep(random.uniform(0.5, 0.6)) + req = api.create_request() + req.get_player(player_locale=args.player_locale) + send_generic_request(req, account, inbox=False) + + if 4 not in tutorial_state: + time.sleep(random.uniform(5, 12)) + req = api.create_request() + req.claim_codename(codename=account['username']) + log.debug('Claiming codename for %s.', account['username']) + send_generic_request(req, account, inbox=False) + + time.sleep(0.1) + req = api.create_request() + req.get_player(player_locale=args.player_locale) + send_generic_request(req, account, inbox=False) + + time.sleep(random.uniform(1, 1.3)) + req = api.create_request() + req.mark_tutorial_complete(tutorials_completed=4) + log.debug('Sending 4 tutorials_completed for %s.', account['username']) + send_generic_request(req, account, inbox=False) + + if 7 not in tutorial_state: + 
time.sleep(random.uniform(4, 10)) + req = api.create_request() + req.mark_tutorial_complete(tutorials_completed=7) + log.debug('Sending 7 tutorials_completed for %s.', account['username']) + send_generic_request(req, account, inbox=False) + + # Sleeping before we start scanning to avoid Niantic throttling. + log.debug('And %s is done. Wait for a second, to avoid throttle.', + account['username']) + time.sleep(random.uniform(2, 4)) + return True +>>>>>>> origin/develop def reset_account(account): @@ -174,9 +555,18 @@ def spin_pokestop(pgacc, account, args, fort, step_location): time.sleep(random.uniform(2, 4)) # Don't let Niantic throttle. # Check for reCaptcha. +<<<<<<< HEAD if pgacc.has_captcha(): log.debug('Account encountered a reCaptcha.') return False +======= + if 'CHECK_CHALLENGE' in response['responses']: + captcha_url = response[ + 'responses']['CHECK_CHALLENGE'].challenge_url + if len(captcha_url) > 1: + log.debug('Account encountered a reCaptcha.') + return False +>>>>>>> origin/develop spin_result = response['FORT_SEARCH'].result if spin_result is 1: @@ -228,9 +618,18 @@ def clear_pokemon(pgacc): if not release_p_response: return False +<<<<<<< HEAD if pgacc.has_captcha(): log.info('Account encountered a reCaptcha.') return False +======= + if 'CHECK_CHALLENGE' in release_p_response['responses']: + captcha_url = release_p_response[ + 'responses']['CHECK_CHALLENGE'].challenge_url + if len(captcha_url) > 1: + log.info('Account encountered a reCaptcha.') + return False +>>>>>>> origin/develop release_response = release_p_response['RELEASE_POKEMON'] release_result = release_response.result @@ -263,9 +662,18 @@ def clear_inventory(pgacc): clear_inventory_response = clear_inventory_request( pgacc, item_id, drop_count) +<<<<<<< HEAD if pgacc.has_captcha(): log.info('Account encountered a reCaptcha.') return False +======= + if 'CHECK_CHALLENGE' in resp['responses']: + captcha_url = resp[ + 'responses']['CHECK_CHALLENGE'].challenge_url + if len(captcha_url) > 
1: + log.info('Account encountered a reCaptcha.') + return False +>>>>>>> origin/develop if not clear_inventory_response: continue @@ -435,11 +843,12 @@ def next(self, set_name, coords_to_scan): # Check if we're below speed limit for account. last_scanned = account.get('last_scanned', False) - if last_scanned: + if last_scanned and self.kph > 0: seconds_passed = now - last_scanned old_coords = account.get('last_coords', coords_to_scan) distance_m = distance(old_coords, coords_to_scan) + cooldown_time_sec = distance_m / self.kph * 3.6 # Not enough time has passed for this one. diff --git a/pogom/apiRequests.py b/pogom/apiRequests.py new file mode 100644 index 0000000000..fbeba32315 --- /dev/null +++ b/pogom/apiRequests.py @@ -0,0 +1,258 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +import logging + +from pgoapi.utilities import get_cell_ids +from pgoapi.hash_server import BadHashRequestException, HashingOfflineException + +log = logging.getLogger(__name__) + + +class AccountBannedException(Exception): + pass + + +def send_generic_request(req, account, settings=True, buddy=True, inbox=True): + req.check_challenge() + req.get_hatched_eggs() + req.get_holo_inventory(last_timestamp_ms=account['last_timestamp_ms']) + req.check_awarded_badges() + + if settings: + if 'remote_config' in account: + req.download_settings(hash=account['remote_config']['hash']) + else: + req.download_settings() + + if buddy: + req.get_buddy_walked() + + if inbox: + req.get_inbox(is_history=True) + + try: + resp = req.call(False) + except HashingOfflineException: + log.error('Hashing server is unreachable, it might be offline.') + raise + except BadHashRequestException: + log.error('Invalid or expired hashing key: %s.', + req.__parent__.get_hash_server_token()) + raise + + parse_inventory(account, resp) + if settings: + parse_remote_config(account, resp) + + # Clean all unneeded data. 
+ del resp['envelope'].platform_returns[:] + if 'responses' not in resp: + return resp + responses = [ + 'GET_HATCHED_EGGS', 'GET_HOLO_INVENTORY', 'CHECK_AWARDED_BADGES', + 'DOWNLOAD_SETTINGS', 'GET_BUDDY_WALKED', 'GET_INBOX' + ] + for item in responses: + if item in resp['responses']: + del resp['responses'][item] + + log.log(5, 'Response: \n%s', resp) + return resp + + +def parse_remote_config(account, api_response): + if 'DOWNLOAD_REMOTE_CONFIG_VERSION' not in api_response['responses']: + return + + remote_config = api_response['responses']['DOWNLOAD_REMOTE_CONFIG_VERSION'] + if remote_config.result == 0: + raise AccountBannedException('The account is temporarily banned') + + asset_time = remote_config.asset_digest_timestamp_ms / 1000000 + template_time = remote_config.item_templates_timestamp_ms / 1000 + + download_settings = {} + download_settings['hash'] = api_response['responses'][ + 'DOWNLOAD_SETTINGS'].hash + download_settings['asset_time'] = asset_time + download_settings['template_time'] = template_time + + account['remote_config'] = download_settings + + log.debug('Download settings for account %s: %s.', account['username'], + download_settings) + + +# Parse player stats and inventory into account. 
+def parse_inventory(account, api_response): + if 'GET_HOLO_INVENTORY' not in api_response['responses']: + return + inventory = api_response['responses']['GET_HOLO_INVENTORY'] + parsed_items = 0 + parsed_pokemons = 0 + parsed_eggs = 0 + parsed_incubators = 0 + account['last_timestamp_ms'] = inventory.inventory_delta.new_timestamp_ms + + for item in inventory.inventory_delta.inventory_items: + item_data = item.inventory_item_data + if item_data.HasField('player_stats'): + stats = item_data.player_stats + account['level'] = stats.level + account['spins'] = stats.poke_stop_visits + account['walked'] = stats.km_walked + + log.debug('Parsed %s player stats: level %d, %f km ' + + 'walked, %d spins.', account['username'], + account['level'], account['walked'], account['spins']) + elif item_data.HasField('item'): + item_id = item_data.item.item_id + item_count = item_data.item.count + account['items'][item_id] = item_count + parsed_items += item_count + elif item_data.HasField('egg_incubators'): + incubators = item_data.egg_incubators.egg_incubator + for incubator in incubators: + if incubator.pokemon_id != 0: + left = (incubator.target_km_walked - account['walked']) + log.debug('Egg kms remaining: %.2f', left) + else: + account['incubators'].append({ + 'id': incubator.id, + 'item_id': incubator.item_id, + 'uses_remaining': incubator.uses_remaining + }) + parsed_incubators += 1 + elif item_data.HasField('pokemon_data'): + p_data = item_data.pokemon_data + p_id = p_data.id + if not p_data.is_egg: + account['pokemons'][p_id] = { + 'pokemon_id': p_data.pokemon_id, + 'move_1': p_data.move_1, + 'move_2': p_data.move_2, + 'height': p_data.height_m, + 'weight': p_data.weight_kg, + 'gender': p_data.pokemon_display.gender, + 'cp': p_data.cp, + 'cp_multiplier': p_data.cp_multiplier + } + parsed_pokemons += 1 + else: + if p_data.egg_incubator_id: + # Egg is already incubating. 
+ continue + account['eggs'].append({ + 'id': p_id, + 'km_target': p_data.egg_km_walked_target + }) + parsed_eggs += 1 + log.debug( + 'Parsed %s player inventory: %d items, %d pokemons, %d available' + + ' eggs and %d available incubators.', account['username'], + parsed_items, parsed_pokemons, parsed_eggs, parsed_incubators) + + +def catchRequestException(task): + + def _catch(function): + + def wrapper(*args, **kwargs): + try: + return function(*args, **kwargs) + except Exception as e: + log.exception('Exception while %s with account %s: %s.', task, + kwargs.get('account', args[1])['username'], e) + return False + + return wrapper + + return _catch + + +@catchRequestException('spinning Pokestop') +def fort_search(api, account, fort, step_location): + req = api.create_request() + req.fort_search( + fort_id=fort.id, + fort_latitude=fort.latitude, + fort_longitude=fort.longitude, + player_latitude=step_location[0], + player_longitude=step_location[1]) + return send_generic_request(req, account) + + +@catchRequestException('getting Pokestop details') +def fort_details(api, account, fort): + req = api.create_request() + req.fort_details( + fort_id=fort.id, latitude=fort.latitude, longitude=fort.longitude) + return send_generic_request(req, account) + + +@catchRequestException('encountering Pokémon') +def encounter(api, account, encounter_id, spawnpoint_id, scan_location): + req = api.create_request() + req.encounter( + encounter_id=encounter_id, + spawn_point_id=spawnpoint_id, + player_latitude=scan_location[0], + player_longitude=scan_location[1]) + return send_generic_request(req, account) + + +@catchRequestException('clearing Inventory') +def recycle_inventory_item(api, account, item_id, drop_count): + req = api.create_request() + req.recycle_inventory_item(item_id=item_id, count=drop_count) + return send_generic_request(req, account) + + +@catchRequestException('putting an egg in incubator') +def use_item_egg_incubator(api, account, incubator_id, egg_id): + req = 
api.create_request() + req.use_item_egg_incubator(item_id=incubator_id, pokemon_id=egg_id) + return send_generic_request(req, account) + + +@catchRequestException('releasing Pokemon') +def release_pokemon(api, account, pokemon_id, release_ids=None): + if release_ids is None: + return False + + req = api.create_request() + req.release_pokemon(pokemon_id=pokemon_id, pokemon_ids=release_ids) + return send_generic_request(req, account) + + +@catchRequestException('getting Rewards') +def level_up_rewards(api, account): + req = api.create_request() + req.level_up_rewards(level=account['level']) + return send_generic_request(req, account) + + +@catchRequestException('downloading map') +def get_map_objects(api, account, location): + cell_ids = get_cell_ids(location[0], location[1]) + timestamps = [0, ]*len(cell_ids) + req = api.create_request() + req.get_map_objects( + latitude=location[0], + longitude=location[1], + since_timestamp_ms=timestamps, + cell_id=cell_ids) + return send_generic_request(req, account) + + +@catchRequestException('getting gym details') +def gym_get_info(api, account, position, gym): + req = api.create_request() + req.gym_get_info( + gym_id=gym['gym_id'], + player_lat_degrees=position[0], + player_lng_degrees=position[1], + gym_lat_degrees=gym['latitude'], + gym_lng_degrees=gym['longitude']) + return send_generic_request(req, account) diff --git a/pogom/app.py b/pogom/app.py index d3985d53b1..34203fea0b 100644 --- a/pogom/app.py +++ b/pogom/app.py @@ -3,6 +3,7 @@ import calendar import logging +import gc from flask import Flask, abort, jsonify, render_template, request, \ make_response, send_from_directory, send_file @@ -19,13 +20,37 @@ from .models import (Pokemon, Gym, Pokestop, ScannedLocation, MainWorker, WorkerStatus, Token, HashKeys, SpawnPoint) -from .utils import now, dottedQuadToNum +from .utils import (get_pokemon_name, get_pokemon_types, get_pokemon_rarity, + now, dottedQuadToNum) +from .transform import transform_from_wgs_to_gcj from 
.blacklist import fingerprints, get_ip_blacklist log = logging.getLogger(__name__) compress = Compress() +def convert_pokemon_list(pokemon): + args = get_args() + # Performance: disable the garbage collector prior to creating a + # (potentially) large dict with append(). + gc.disable() + + pokemon_result = [] + for p in pokemon: + p['pokemon_name'] = get_pokemon_name(p['pokemon_id']) + p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id']) + p['pokemon_types'] = get_pokemon_types(p['pokemon_id']) + p['encounter_id'] = str(p['encounter_id']) + if args.china: + p['latitude'], p['longitude'] = \ + transform_from_wgs_to_gcj(p['latitude'], p['longitude']) + pokemon_result.append(p) + + # Re-enable the GC. + gc.enable() + return pokemon + + class Pogom(Flask): def __init__(self, import_name, **kwargs): @@ -259,9 +284,7 @@ def fullmap(self, statusname=None): if args.on_demand_timeout > 0: self.control_flags['on_demand'].clear() - search_display = True if (args.search_control and - args.on_demand_timeout <= 0) else False - + search_display = (args.search_control and args.on_demand_timeout <= 0) scan_display = False if (args.only_server or args.fixed_location or args.spawnpoint_scanning) else True @@ -385,24 +408,33 @@ def raw_data(self): not args.no_pokemon): if request.args.get('ids'): ids = [int(x) for x in request.args.get('ids').split(',')] - d['pokemons'] = Pokemon.get_active_by_id(ids, swLat, swLng, - neLat, neLng) + d['pokemons'] = convert_pokemon_list( + Pokemon.get_active_by_id(ids, swLat, swLng, neLat, neLng)) elif lastpokemon != 'true': # If this is first request since switch on, load # all pokemon on screen. - d['pokemons'] = Pokemon.get_active(swLat, swLng, neLat, neLng) + d['pokemons'] = convert_pokemon_list( + Pokemon.get_active(swLat, swLng, neLat, neLng)) else: # If map is already populated only request modified Pokemon # since last request time. 
- d['pokemons'] = Pokemon.get_active(swLat, swLng, neLat, neLng, - timestamp=timestamp) + d['pokemons'] = convert_pokemon_list( + Pokemon.get_active( + swLat, swLng, neLat, neLng, timestamp=timestamp)) if newArea: # If screen is moved add newly uncovered Pokemon to the # ones that were modified since last request time. d['pokemons'] = d['pokemons'] + ( - Pokemon.get_active(swLat, swLng, neLat, neLng, - oSwLat=oSwLat, oSwLng=oSwLng, - oNeLat=oNeLat, oNeLng=oNeLng)) + convert_pokemon_list( + Pokemon.get_active( + swLat, + swLng, + neLat, + neLng, + oSwLat=oSwLat, + oSwLng=oSwLng, + oNeLat=oNeLat, + oNeLng=oNeLng))) if request.args.get('eids'): # Exclude id's of pokemon that are hidden. @@ -413,8 +445,9 @@ def raw_data(self): if request.args.get('reids'): reids = [int(x) for x in request.args.get('reids').split(',')] d['pokemons'] = d['pokemons'] + ( - Pokemon.get_active_by_id(reids, swLat, swLng, - neLat, neLng)) + convert_pokemon_list( + Pokemon.get_active_by_id(reids, swLat, swLng, neLat, + neLng))) d['reids'] = reids if (request.args.get('pokestops', 'true') == 'true' and @@ -547,7 +580,8 @@ def list_pokemon(self): lon = request.args.get('lon', self.current_location[1], type=float) origin_point = LatLng.from_degrees(lat, lon) - for pokemon in Pokemon.get_active(None, None, None, None): + for pokemon in convert_pokemon_list( + Pokemon.get_active(None, None, None, None)): pokemon_point = LatLng.from_degrees(pokemon['latitude'], pokemon['longitude']) diff = pokemon_point - origin_point diff --git a/pogom/captcha.py b/pogom/captcha.py index 2445392bc6..cbf6f3d084 100644 --- a/pogom/captcha.py +++ b/pogom/captcha.py @@ -127,8 +127,17 @@ def captcha_solver_thread(args, account_queue, account_captchas, hash_key, location = account['last_location'] +<<<<<<< HEAD pgacc.set_position(location[0], location[1], location[2]) pgacc.check_login() +======= + if args.jitter: + # Jitter location before uncaptcha attempt. 
+ location = jitter_location(location) + + api.set_position(*location) + check_login(args, account, api, proxy_url) +>>>>>>> origin/develop if not token: token = token_request(args, status, captcha_url) diff --git a/pogom/cluster.py b/pogom/cluster.py new file mode 100644 index 0000000000..2d7aa5b90f --- /dev/null +++ b/pogom/cluster.py @@ -0,0 +1,107 @@ +from .utils import distance +from .transform import intermediate_point + + +class SpawnCluster(object): + def __init__(self, spawnpoint): + self._spawnpoints = [spawnpoint] + self.centroid = (spawnpoint['lat'], spawnpoint['lng']) + self.min_time = spawnpoint['time'] + self.max_time = spawnpoint['time'] + self.spawnpoint_id = spawnpoint['spawnpoint_id'] + self.appears = spawnpoint['appears'] + self.leaves = spawnpoint['leaves'] + + def __getitem__(self, key): + return self._spawnpoints[key] + + def __iter__(self): + for x in self._spawnpoints: + yield x + + def __contains__(self, item): + return item in self._spawnpoints + + def __len__(self): + return len(self._spawnpoints) + + def append(self, spawnpoint): + self.centroid = self.new_centroid(spawnpoint) + + self._spawnpoints.append(spawnpoint) + + if spawnpoint['time'] < self.min_time: + self.min_time = spawnpoint['time'] + + elif spawnpoint['time'] > self.max_time: + self.max_time = spawnpoint['time'] + self.spawnpoint_id = spawnpoint['spawnpoint_id'] + self.appears = spawnpoint['appears'] + self.leaves = spawnpoint['leaves'] + + def get_score(self, spawnpoint, time_threshold): + min_time = min(self.min_time, spawnpoint['time']) + max_time = max(self.max_time, spawnpoint['time']) + sp_position = (spawnpoint['lat'], spawnpoint['lng']) + + if max_time - min_time > time_threshold: + return float('inf') + else: + return distance(sp_position, self.centroid) + + def new_centroid(self, spawnpoint): + sp_count = len(self._spawnpoints) + f = sp_count / (sp_count + 1.0) + new_centroid = intermediate_point( + (spawnpoint['lat'], spawnpoint['lng']), self.centroid, f) + + 
return new_centroid + + def test_spawnpoint(self, spawnpoint, radius, time_threshold): + # Discard spawn points outside the time frame or too far away. + if self.get_score(spawnpoint, time_threshold) > 2 * radius: + return False + + new_centroid = self.new_centroid(spawnpoint) + + # Check if spawn point is within range of the new centroid. + if (distance((spawnpoint['lat'], spawnpoint['lng']), new_centroid) > + radius): + return False + + # Check if cluster's spawn points remain in range of the new centroid. + if any(distance((x['lat'], x['lng']), new_centroid) > + radius for x in self._spawnpoints): + return False + + return True + + +# Group spawn points with similar spawn times that are close to each other. +def cluster_spawnpoints(spawnpoints, radius=70, time_threshold=240): + # Initialize cluster list with the first spawn point available. + clusters = [SpawnCluster(spawnpoints.pop())] + for sp in spawnpoints: + # Pick the closest cluster compatible to current spawn point. + c = min(clusters, key=lambda x: x.get_score(sp, time_threshold)) + + if c.test_spawnpoint(sp, radius, time_threshold): + c.append(sp) + else: + c = SpawnCluster(sp) + clusters.append(c) + + # Output new spawn points from generated clusters. Use the latest time + # to be sure that every spawn point in the cluster has already spawned. 
+ result = [] + for c in clusters: + result.append({ + 'spawnpoint_id': c.spawnpoint_id, + 'lat': c.centroid[0], + 'lng': c.centroid[1], + 'time': c.max_time, + 'appears': c.appears, + 'leaves': c.leaves + }) + + return result diff --git a/pogom/models.py b/pogom/models.py index 234ce3817b..b5256da45b 100644 --- a/pogom/models.py +++ b/pogom/models.py @@ -7,28 +7,30 @@ import sys import gc import time -import geopy import math import s2sphere from peewee import (InsertQuery, Check, CompositeKey, ForeignKeyField, SmallIntegerField, IntegerField, CharField, DoubleField, BooleanField, DateTimeField, fn, DeleteQuery, FloatField, - TextField, JOIN, OperationalError) + TextField, BigIntegerField, PrimaryKeyField, + JOIN, OperationalError) from playhouse.flask_utils import FlaskDB from playhouse.pool import PooledMySQLDatabase from playhouse.shortcuts import RetryOperationalError, case -from playhouse.migrate import migrate, MySQLMigrator, SqliteMigrator -from playhouse.sqlite_ext import SqliteExtDatabase +from playhouse.migrate import migrate, MySQLMigrator from datetime import datetime, timedelta -from base64 import b64encode from cachetools import TTLCache from cachetools import cached from timeit import default_timer +<<<<<<< HEAD from pogom.gainxp import DITTO_CANDIDATES_IDS, is_ditto, gxp_spin_stops from pogom.pgscout import pgscout_encounter from .utils import (get_pokemon_name, get_pokemon_rarity, get_pokemon_types, +======= +from .utils import (get_pokemon_name, get_pokemon_types, +>>>>>>> origin/develop get_args, cellid, in_radius, date_secs, clock_between, get_move_name, get_move_damage, get_move_energy, get_move_type, calc_pokemon_level) @@ -48,7 +50,11 @@ flaskDb = FlaskDB() cache = TTLCache(maxsize=100, ttl=60 * 5) +<<<<<<< HEAD db_schema_version = 25 +======= +db_schema_version = 22 +>>>>>>> origin/develop class MyRetryDB(RetryOperationalError, PooledMySQLDatabase): @@ -62,27 +68,22 @@ def __init__(self, max_length=191, *args, **kwargs): super(CharField, 
self).__init__(*args, **kwargs) +class UBigIntegerField(BigIntegerField): + db_field = 'bigint unsigned' + + def init_database(app): - if args.db_type == 'mysql': - log.info('Connecting to MySQL database on %s:%i...', - args.db_host, args.db_port) - db = MyRetryDB( - args.db_name, - user=args.db_user, - password=args.db_pass, - host=args.db_host, - port=args.db_port, - stale_timeout=30, - max_connections=None, - charset='utf8mb4') - else: - log.info('Connecting to local SQLite database') - db = SqliteExtDatabase(args.db, - pragmas=( - ('journal_mode', 'WAL'), - ('mmap_size', 1024 * 1024 * 32), - ('cache_size', 10000), - ('journal_size_limit', 1024 * 1024 * 4),)) + log.info('Connecting to MySQL database on %s:%i...', + args.db_host, args.db_port) + db = MyRetryDB( + args.db_name, + user=args.db_user, + password=args.db_pass, + host=args.db_host, + port=args.db_port, + stale_timeout=30, + max_connections=None, + charset='utf8mb4') # Using internal method as the other way would be using internal var, we # could use initializer but db is initialized later @@ -119,8 +120,8 @@ def get_all(cls): class Pokemon(LatLongModel): # We are base64 encoding the ids delivered by the api # because they are too big for sqlite to handle. - encounter_id = Utf8mb4CharField(primary_key=True, max_length=50) - spawnpoint_id = Utf8mb4CharField(index=True) + encounter_id = UBigIntegerField(primary_key=True) + spawnpoint_id = UBigIntegerField(index=True) pokemon_id = SmallIntegerField(index=True) latitude = DoubleField() longitude = DoubleField() @@ -195,26 +196,7 @@ def get_active(swLat, swLng, neLat, neLng, timestamp=0, oSwLat=None, (Pokemon.latitude <= neLat) & (Pokemon.longitude <= neLng)))) .dicts()) - - # Performance: disable the garbage collector prior to creating a - # (potentially) large dict with append(). 
- gc.disable() - - pokemon = [] - for p in list(query): - - p['pokemon_name'] = get_pokemon_name(p['pokemon_id']) - p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id']) - p['pokemon_types'] = get_pokemon_types(p['pokemon_id']) - if args.china: - p['latitude'], p['longitude'] = \ - transform_from_wgs_to_gcj(p['latitude'], p['longitude']) - pokemon.append(p) - - # Re-enable the GC. - gc.enable() - - return pokemon + return list(query) @staticmethod def get_active_by_id(ids, swLat, swLng, neLat, neLng): @@ -235,24 +217,7 @@ def get_active_by_id(ids, swLat, swLng, neLat, neLng): (Pokemon.longitude <= neLng)) .dicts()) - # Performance: disable the garbage collector prior to creating a - # (potentially) large dict with append(). - gc.disable() - - pokemon = [] - for p in query: - p['pokemon_name'] = get_pokemon_name(p['pokemon_id']) - p['pokemon_rarity'] = get_pokemon_rarity(p['pokemon_id']) - p['pokemon_types'] = get_pokemon_types(p['pokemon_id']) - if args.china: - p['latitude'], p['longitude'] = \ - transform_from_wgs_to_gcj(p['latitude'], p['longitude']) - pokemon.append(p) - - # Re-enable the GC. 
- gc.enable() - - return pokemon + return list(query) @staticmethod @cached(cache) @@ -362,8 +327,7 @@ class Pokestop(LatLongModel): longitude = DoubleField() last_modified = DateTimeField(index=True) lure_expiration = DateTimeField(null=True, index=True) - active_fort_modifier = Utf8mb4CharField(max_length=50, - null=True, index=True) + active_fort_modifier = SmallIntegerField(null=True, index=True) last_updated = DateTimeField( null=True, index=True, default=datetime.utcnow) @@ -644,7 +608,7 @@ def get_gym(id): .join(Trainer, on=(GymPokemon.trainer_name == Trainer.name)) .where(GymMember.gym_id == id) .where(GymMember.last_scanned > Gym.last_modified) - .order_by(GymMember.cp_decayed.desc()) + .order_by(GymMember.deployment_time.desc()) .distinct() .dicts()) @@ -689,7 +653,7 @@ class Raid(BaseModel): class LocationAltitude(LatLongModel): - cellid = Utf8mb4CharField(primary_key=True, max_length=50) + cellid = UBigIntegerField(primary_key=True) latitude = DoubleField() longitude = DoubleField() last_modified = DateTimeField(index=True, default=datetime.utcnow, @@ -758,7 +722,7 @@ def get_locale(location): class ScannedLocation(LatLongModel): - cellid = Utf8mb4CharField(primary_key=True, max_length=50) + cellid = UBigIntegerField(primary_key=True) latitude = DoubleField() longitude = DoubleField() last_modified = DateTimeField( @@ -906,11 +870,10 @@ def get_by_loc(loc): # spannedlocation records. Otherwise, search through the spawnpoint list # and update scan_spawn_point dict for DB bulk upserting. 
@staticmethod - def link_spawn_points(scans, initial, spawn_points, distance, - scan_spawn_point, force=False): + def link_spawn_points(scans, initial, spawn_points, distance): + index = 0 + scan_spawn_point = {} for cell, scan in scans.iteritems(): - if initial[cell]['done'] and not force: - continue # Difference in degrees at the equator for 70m is actually 0.00063 # degrees and gets smaller the further north or south you go deg_at_lat = 0.0007 / math.cos(math.radians(scan['loc'][0])) @@ -920,9 +883,11 @@ def link_spawn_points(scans, initial, spawn_points, distance, continue if in_radius((sp['latitude'], sp['longitude']), scan['loc'], distance * 1000): - scan_spawn_point[cell + sp['id']] = { + scan_spawn_point[index] = { 'spawnpoint': sp['id'], 'scannedlocation': cell} + index += 1 + return scan_spawn_point # Return list of dicts for upcoming valid band times. @staticmethod @@ -1205,7 +1170,7 @@ def get_center_of_worker(cls, worker_name): class SpawnPoint(LatLongModel): - id = Utf8mb4CharField(primary_key=True, max_length=50) + id = UBigIntegerField(primary_key=True) latitude = DoubleField() longitude = DoubleField() last_scanned = DateTimeField(index=True) @@ -1240,8 +1205,9 @@ class SpawnPoint(LatLongModel): class Meta: indexes = ((('latitude', 'longitude'), False),) constraints = [Check('earliest_unseen >= 0'), - Check('earliest_unseen < 3600'), - Check('latest_seen >= 0'), Check('latest_seen < 3600')] + Check('earliest_unseen <= 3600'), + Check('latest_seen >= 0'), + Check('latest_seen <= 3600')] # Returns the spawnpoint dict from ID, or a new dict if not found. 
@staticmethod @@ -1324,69 +1290,16 @@ def get_spawnpoints(swLat, swLng, neLat, neLng, timestamp=0, return list(spawnpoints.values()) - @staticmethod - def get_spawnpoints_in_hex(center, steps): - - log.info('Finding spawnpoints {} steps away.'.format(steps)) - - n, e, s, w = hex_bounds(center, steps) - - query = (SpawnPoint - .select(SpawnPoint.latitude.alias('lat'), - SpawnPoint.longitude.alias('lng'), - SpawnPoint.id, - SpawnPoint.earliest_unseen, - SpawnPoint.latest_seen, - SpawnPoint.kind, - SpawnPoint.links, - )) - query = (query.where((SpawnPoint.latitude <= n) & - (SpawnPoint.latitude >= s) & - (SpawnPoint.longitude >= w) & - (SpawnPoint.longitude <= e) - )) - # Sqlite doesn't support distinct on columns. - if args.db_type == 'mysql': - query = query.distinct(SpawnPoint.id) - else: - query = query.group_by(SpawnPoint.id) - - with SpawnPoint.database().execution_context(): - s = list(query.dicts()) - - # The distance between scan circles of radius 70 in a hex is 121.2436 - # steps - 1 to account for the center circle then add 70 for the edge. - step_distance = ((steps - 1) * 121.2436) + 70 - # Compare spawnpoint list to a circle with radius steps * 120. - # Uses the direct geopy distance between the center and the spawnpoint. - filtered = [] - - for idx, sp in enumerate(s): - if geopy.distance.distance( - center, (sp['lat'], sp['lng'])).meters <= step_distance: - filtered.append(s[idx]) - - # We use 'time' as appearance time as this was how things worked - # previously we now also include 'disappear_time' because we - # can and it is meaningful in a list of spawn data - # the other changes also maintain a similar file format - for sp in filtered: - sp['time'], sp['disappear_time'] = SpawnPoint.start_end(sp) - del sp['earliest_unseen'] - del sp['latest_seen'] - del sp['kind'] - del sp['links'] - sp['spawnpoint_id'] = sp['id'] - del sp['id'] - - return filtered - # Confirm if tth has been found. @staticmethod def tth_found(sp): # Fully indentified if no '?' 
in links and # latest_seen == earliest_unseen. - return sp['latest_seen'] == sp['earliest_unseen'] + # Warning: python uses modulo as the least residue, not as + # remainder, so we don't apply it to the result. + latest_seen = (sp['latest_seen'] % 3600) + earliest_unseen = (sp['earliest_unseen'] % 3600) + return latest_seen - earliest_unseen == 0 # Return [start, end] in seconds after the hour for the spawn, despawn # time of a spawnpoint. @@ -1560,11 +1473,11 @@ class Meta: class SpawnpointDetectionData(BaseModel): - id = Utf8mb4CharField(primary_key=True, max_length=54) + id = PrimaryKeyField() # Removed ForeignKeyField since it caused MySQL issues. - encounter_id = Utf8mb4CharField(max_length=54) + encounter_id = UBigIntegerField() # Removed ForeignKeyField since it caused MySQL issues. - spawnpoint_id = Utf8mb4CharField(max_length=54, index=True) + spawnpoint_id = UBigIntegerField(index=True) scan_time = DateTimeField() tth_secs = SmallIntegerField(null=True) @@ -1647,9 +1560,13 @@ def classify(sp, scan_loc, now_secs, sighting=None): sp['links'] = sp['kind'].replace('s', '?') if sp['kind'] != 'ssss': - + # Cover all bases, make sure we're using values < 3600. + # Warning: python uses modulo as the least residue, not as + # remainder, so we don't apply it to the result. + residue_unseen = sp['earliest_unseen'] % 3600 + residue_seen = sp['latest_seen'] % 3600 if (not sp['earliest_unseen'] or - sp['earliest_unseen'] != sp['latest_seen'] or + residue_unseen != residue_seen or not tth_found): # New latest_seen will be just before max_gap. @@ -1664,7 +1581,14 @@ def classify(sp, scan_loc, now_secs, sighting=None): # Only ssss spawns from here below. sp['links'] = '+++-' - if sp['earliest_unseen'] == sp['latest_seen']: + + # Cover all bases, make sure we're using values < 3600. + # Warning: python uses modulo as the least residue, not as + # remainder, so we don't apply it to the result. 
+ residue_unseen = sp['earliest_unseen'] % 3600 + residue_seen = sp['latest_seen'] % 3600 + + if residue_unseen == residue_seen: return # Make a sight_list of dicts: @@ -1746,7 +1670,13 @@ def classify(sp, scan_loc, now_secs, sighting=None): def unseen(sp, now_secs): # Return if we already have a tth. - if sp['latest_seen'] == sp['earliest_unseen']: + # Cover all bases, make sure we're using values < 3600. + # Warning: python uses modulo as the least residue, not as + # remainder, so we don't apply it to the result. + residue_unseen = sp['earliest_unseen'] % 3600 + residue_seen = sp['latest_seen'] % 3600 + + if residue_seen == residue_unseen: return False # If now_secs is later than the latest seen return. @@ -1769,7 +1699,7 @@ class Meta: class GymMember(BaseModel): gym_id = Utf8mb4CharField(index=True) - pokemon_uid = Utf8mb4CharField(index=True) + pokemon_uid = UBigIntegerField(index=True) last_scanned = DateTimeField(default=datetime.utcnow, index=True) deployment_time = DateTimeField() cp_decayed = SmallIntegerField() @@ -1779,7 +1709,7 @@ class Meta: class GymPokemon(BaseModel): - pokemon_uid = Utf8mb4CharField(primary_key=True, max_length=50) + pokemon_uid = UBigIntegerField(primary_key=True) pokemon_id = SmallIntegerField() cp = SmallIntegerField() trainer_name = Utf8mb4CharField(index=True) @@ -1983,7 +1913,11 @@ def perform_pgscout(p): # todo: this probably shouldn't _really_ be in "models" anymore, but w/e. def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, +<<<<<<< HEAD wh_update_queue, key_scheduler, pgacc, status, now_date, account, +======= + wh_update_queue, key_scheduler, api, status, now_date, account, +>>>>>>> origin/develop account_sets): pokemon = {} pokestops = {} @@ -2111,7 +2045,8 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, if not wild_pokemon and not nearby_pokemon: # ...and there are no gyms/pokestops then it's unusable/bad. if not forts: - log.warning('Bad scan. 
Parsing found absolutely nothing.') + log.warning('Bad scan. Parsing found absolutely nothing' + + ' using account %s.', account['username']) log.info('Common causes: captchas or IP bans.') elif not args.no_pokemon: # When gym scanning we'll go over the speed limit @@ -2127,8 +2062,7 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, just_completed = not done_already and scan_location['done'] if wild_pokemon and not args.no_pokemon: - encounter_ids = [b64encode(str(p.encounter_id)) - for p in wild_pokemon] + encounter_ids = [p.encounter_id for p in wild_pokemon] # For all the wild Pokemon we found check if an active Pokemon is in # the database. with Pokemon.database().execution_context(): @@ -2148,22 +2082,21 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, clear_pokemon(pgacc) for p in wild_pokemon: - - sp = SpawnPoint.get_by_id(p.spawn_point_id, p.latitude, + spawn_id = int(p.spawn_point_id, 16) + sp = SpawnPoint.get_by_id(spawn_id, p.latitude, p.longitude) - spawn_points[p.spawn_point_id] = sp + spawn_points[spawn_id] = sp sp['missed_count'] = 0 sighting = { - 'id': b64encode(str(p.encounter_id)) + '_' + str(now_secs), - 'encounter_id': b64encode(str(p.encounter_id)), - 'spawnpoint_id': p.spawn_point_id, + 'encounter_id': p.encounter_id, + 'spawnpoint_id': spawn_id, 'scan_time': now_date, 'tth_secs': None } # Keep a list of sp_ids to return. - sp_id_list.append(p.spawn_point_id) + sp_id_list.append(spawn_id) # time_till_hidden_ms was overflowing causing a negative integer. # It was also returning a value above 3.6M ms. @@ -2171,7 +2104,14 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, d_t_secs = date_secs(datetime.utcfromtimestamp( (p.last_modified_timestamp_ms + p.time_till_hidden_ms) / 1000.0)) - if (sp['latest_seen'] != sp['earliest_unseen'] or + + # Cover all bases, make sure we're using values < 3600. 
+ # Warning: python uses modulo as the least residue, not as + # remainder, so we don't apply it to the result. + residue_unseen = sp['earliest_unseen'] % 3600 + residue_seen = sp['latest_seen'] % 3600 + + if (residue_seen != residue_unseen or not sp['last_scanned']): log.info('TTH found for spawnpoint %s.', sp['id']) sighting['tth_secs'] = d_t_secs @@ -2183,7 +2123,11 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, sp['latest_seen'] = d_t_secs sp['earliest_unseen'] = d_t_secs +<<<<<<< HEAD scan_spawn_points[scan_location['cellid'] + sp['id']] = { +======= + scan_spawn_points[len(scan_spawn_points)+1] = { +>>>>>>> origin/develop 'spawnpoint': sp['id'], 'scannedlocation': scan_location['cellid']} if not sp['last_scanned']: @@ -2209,8 +2153,7 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, sp['last_scanned'] = datetime.utcfromtimestamp( p.last_modified_timestamp_ms / 1000.0) - if ((b64encode(str(p.encounter_id)), p.spawn_point_id) - in encountered_pokemon): + if ((p.encounter_id, spawn_id) in encountered_pokemon): # If Pokemon has been encountered before don't process it. skipped += 1 continue @@ -2253,8 +2196,8 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, key_scheduler) pokemon[p.encounter_id] = { - 'encounter_id': b64encode(str(p.encounter_id)), - 'spawnpoint_id': p.spawn_point_id, + 'encounter_id': p.encounter_id, + 'spawnpoint_id': spawn_id, 'pokemon_id': pokemon_id, 'latitude': p.latitude, 'longitude': p.longitude, @@ -2375,7 +2318,6 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, # changed don't process it. 
stopsskipped += 1 continue - pokestops[f.id] = { 'pokestop_id': f.id, 'enabled': f.enabled, @@ -2396,7 +2338,7 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, l_e = calendar.timegm(lure_expiration.timetuple()) wh_pokestop = pokestops[f.id].copy() wh_pokestop.update({ - 'pokestop_id': b64encode(str(f.id)), + 'pokestop_id': f.id, 'last_modified': f.last_modified_timestamp_ms, 'lure_expiration': l_e, }) @@ -2404,7 +2346,7 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, # Currently, there are only stops and gyms. elif not args.no_gyms and f.type == 0: - b64_gym_id = b64encode(str(f.id)) + b64_gym_id = str(f.id) gym_display = f.gym_display raid_info = f.raid_info # Send gyms to webhooks. @@ -2532,8 +2474,12 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, for f in forts: # Spin Pokestop with 50% chance. if f.type == 1 and pokestop_spinnable(f, scan_coords): +<<<<<<< HEAD if spin_pokestop(pgacc, account, args, f, scan_coords): incubate_eggs(pgacc) +======= + spin_pokestop(api, account, args, f, scan_coords) +>>>>>>> origin/develop # Helping out the GC. del forts @@ -2581,9 +2527,13 @@ def parse_map(args, map_dict, scan_coords, scan_location, db_update_queue, if (not SpawnPoint.tth_found(sp) and scan_location['done'] and (now_secs - sp['latest_seen'] - args.spawn_delay) % 3600 < 60): + # Warning: python uses modulo as the least residue, not as + # remainder, so we don't apply it to the result. Just a + # safety measure until we can guarantee there's never a negative + # result. 
log.warning('Spawnpoint %s was unable to locate a TTH, with ' 'only %ss after Pokemon last seen.', sp['id'], - (now_secs - sp['latest_seen']) % 3600) + (now_secs % 3600 - sp['latest_seen'] % 3600)) log.info('Restarting current 15 minute search for TTH.') if sp['id'] not in sp_id_list: SpawnpointDetectionData.classify(sp, scan_location, now_secs) @@ -2719,12 +2669,25 @@ def encounter_pokemon(args, pokemon, account, pgacc, account_sets, status, # Handle errors. if enc_responses: # Check for captcha. +<<<<<<< HEAD # Throw warning but finish parsing. if hlvl_pgacc.has_captcha(): # Flag account. log.error('Account %s encountered a captcha.' + ' Account will not be used.', hlvl_account['username']) +======= + if 'CHECK_CHALLENGE' in enc_responses: + captcha_url = enc_responses['CHECK_CHALLENGE'].challenge_url + + # Throw warning but finish parsing. + if len(captcha_url) > 1: + # Flag account. + hlvl_account['captcha'] = True + log.error('Account %s encountered a captcha.' + + ' Account will not be used.', + hlvl_account['username']) +>>>>>>> origin/develop if ('ENCOUNTER' in enc_responses and enc_responses['ENCOUNTER'].status != 1): @@ -2779,7 +2742,7 @@ def parse_gyms(args, gym_responses, wh_update_queue, db_update_queue): if 'gym-info' in args.wh_types: webhook_data = { - 'id': b64encode(str(gym_id)), + 'id': str(gym_id), 'latitude': gym_state.pokemon_fort_proto.latitude, 'longitude': gym_state.pokemon_fort_proto.longitude, 'team': gym_state.pokemon_fort_proto.owned_by_team, @@ -2985,13 +2948,7 @@ def clean_db_loop(args): def bulk_upsert(cls, data, db): num_rows = len(data.values()) i = 0 - - if args.db_type == 'mysql': - step = 250 - else: - # SQLite has a default max number of parameters of 999, - # so we need to limit how many rows we insert for it. 
- step = 50 + step = 250 with db.atomic(): while i < num_rows: @@ -3001,15 +2958,13 @@ def bulk_upsert(cls, data, db): # unable to recognize strings to update unicode keys for # foreign key fields, thus giving lots of foreign key # constraint errors. - if args.db_type == 'mysql': - db.execute_sql('SET FOREIGN_KEY_CHECKS=0;') + db.execute_sql('SET FOREIGN_KEY_CHECKS=0;') # Use peewee's own implementation of the insert_many() method. InsertQuery(cls, rows=data.values()[ i:min(i + step, num_rows)]).upsert().execute() - if args.db_type == 'mysql': - db.execute_sql('SET FOREIGN_KEY_CHECKS=1;') + db.execute_sql('SET FOREIGN_KEY_CHECKS=1;') except Exception as e: # If there is a DB table constraint error, dump the data and @@ -3063,39 +3018,37 @@ def drop_tables(db): def verify_table_encoding(db): - if args.db_type == 'mysql': - with db.execution_context(): + with db.execution_context(): - cmd_sql = ''' - SELECT table_name FROM information_schema.tables WHERE - table_collation != "utf8mb4_unicode_ci" - AND table_schema = "%s"; - ''' % args.db_name - change_tables = db.execute_sql(cmd_sql) + cmd_sql = ''' + SELECT table_name FROM information_schema.tables WHERE + table_collation != "utf8mb4_unicode_ci" + AND table_schema = "%s"; + ''' % args.db_name + change_tables = db.execute_sql(cmd_sql) - cmd_sql = "SHOW tables;" - tables = db.execute_sql(cmd_sql) + cmd_sql = "SHOW tables;" + tables = db.execute_sql(cmd_sql) - if change_tables.rowcount > 0: - log.info('Changing collation and charset on %s tables.', - change_tables.rowcount) + if change_tables.rowcount > 0: + log.info('Changing collation and charset on %s tables.', + change_tables.rowcount) - if change_tables.rowcount == tables.rowcount: - log.info('Changing whole database,' + - ' this might a take while.') + if change_tables.rowcount == tables.rowcount: + log.info('Changing whole database,' + + ' this might a take while.') - db.execute_sql('SET FOREIGN_KEY_CHECKS=0;') - for table in change_tables: - log.debug('Changing 
collation and charset on table %s.', - table[0]) - cmd_sql = '''ALTER TABLE %s CONVERT TO CHARACTER SET utf8mb4 - COLLATE utf8mb4_unicode_ci;''' % str(table[0]) - db.execute_sql(cmd_sql) - db.execute_sql('SET FOREIGN_KEY_CHECKS=1;') + db.execute_sql('SET FOREIGN_KEY_CHECKS=0;') + for table in change_tables: + log.debug('Changing collation and charset on table %s.', + table[0]) + cmd_sql = '''ALTER TABLE %s CONVERT TO CHARACTER SET utf8mb4 + COLLATE utf8mb4_unicode_ci;''' % str(table[0]) + db.execute_sql(cmd_sql) + db.execute_sql('SET FOREIGN_KEY_CHECKS=1;') def verify_database_schema(db): - db.connect() if not Versions.table_exists(): db.create_tables([Versions]) @@ -3114,7 +3067,9 @@ def verify_database_schema(db): db_ver = Versions.get(Versions.key == 'schema_version').val if db_ver < db_schema_version: - database_migrate(db, db_ver) + if not database_migrate(db, db_ver): + log.error('Error migrating database') + sys.exit(1) elif db_ver > db_schema_version: log.error('Your database version (%i) appears to be newer than ' @@ -3134,10 +3089,7 @@ def database_migrate(db, old_ver): old_ver, db_schema_version) # Perform migrations here. 
- if args.db_type == 'mysql': - migrator = MySQLMigrator(db) - else: - migrator = SqliteMigrator(db) + migrator = MySQLMigrator(db) if old_ver < 2: migrate(migrator.add_column('pokestop', 'encounter_id', @@ -3226,79 +3178,75 @@ def database_migrate(db, old_ver): ) if old_ver < 15: - # we don't have to touch sqlite because it has REAL and INTEGER only - if args.db_type == 'mysql': - db.execute_sql('ALTER TABLE `pokemon` ' - 'MODIFY COLUMN `weight` FLOAT NULL DEFAULT NULL,' - 'MODIFY COLUMN `height` FLOAT NULL DEFAULT NULL,' - 'MODIFY COLUMN `gender` SMALLINT NULL DEFAULT NULL' - ';') + db.execute_sql('ALTER TABLE `pokemon` ' + 'MODIFY COLUMN `weight` FLOAT NULL DEFAULT NULL,' + 'MODIFY COLUMN `height` FLOAT NULL DEFAULT NULL,' + 'MODIFY COLUMN `gender` SMALLINT NULL DEFAULT NULL' + ';') if old_ver < 16: log.info('This DB schema update can take some time. ' 'Please be patient.') # change some column types from INT to SMALLINT - # we don't have to touch sqlite because it has INTEGER only - if args.db_type == 'mysql': - db.execute_sql( - 'ALTER TABLE `pokemon` ' - 'MODIFY COLUMN `pokemon_id` SMALLINT NOT NULL,' - 'MODIFY COLUMN `individual_attack` SMALLINT ' - 'NULL DEFAULT NULL,' - 'MODIFY COLUMN `individual_defense` SMALLINT ' - 'NULL DEFAULT NULL,' - 'MODIFY COLUMN `individual_stamina` SMALLINT ' - 'NULL DEFAULT NULL,' - 'MODIFY COLUMN `move_1` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `move_2` SMALLINT NULL DEFAULT NULL;' - ) - db.execute_sql( - 'ALTER TABLE `gym` ' - 'MODIFY COLUMN `team_id` SMALLINT NOT NULL,' - 'MODIFY COLUMN `guard_pokemon_id` SMALLINT NOT NULL;' - ) - db.execute_sql( - 'ALTER TABLE `scannedlocation` ' - 'MODIFY COLUMN `band1` SMALLINT NOT NULL,' - 'MODIFY COLUMN `band2` SMALLINT NOT NULL,' - 'MODIFY COLUMN `band3` SMALLINT NOT NULL,' - 'MODIFY COLUMN `band4` SMALLINT NOT NULL,' - 'MODIFY COLUMN `band5` SMALLINT NOT NULL,' - 'MODIFY COLUMN `midpoint` SMALLINT NOT NULL,' - 'MODIFY COLUMN `width` SMALLINT NOT NULL;' - ) - db.execute_sql( - 
'ALTER TABLE `spawnpoint` ' - 'MODIFY COLUMN `latest_seen` SMALLINT NOT NULL,' - 'MODIFY COLUMN `earliest_unseen` SMALLINT NOT NULL;' - ) - db.execute_sql( - 'ALTER TABLE `spawnpointdetectiondata` ' - 'MODIFY COLUMN `tth_secs` SMALLINT NULL DEFAULT NULL;' - ) - db.execute_sql( - 'ALTER TABLE `versions` ' - 'MODIFY COLUMN `val` SMALLINT NOT NULL;' - ) - db.execute_sql( - 'ALTER TABLE `gympokemon` ' - 'MODIFY COLUMN `pokemon_id` SMALLINT NOT NULL,' - 'MODIFY COLUMN `cp` SMALLINT NOT NULL,' - 'MODIFY COLUMN `num_upgrades` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `move_1` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `move_2` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `stamina` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `stamina_max` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `iv_defense` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `iv_stamina` SMALLINT NULL DEFAULT NULL,' - 'MODIFY COLUMN `iv_attack` SMALLINT NULL DEFAULT NULL;' - ) - db.execute_sql( - 'ALTER TABLE `trainer` ' - 'MODIFY COLUMN `team` SMALLINT NOT NULL,' - 'MODIFY COLUMN `level` SMALLINT NOT NULL;' - ) + db.execute_sql( + 'ALTER TABLE `pokemon` ' + 'MODIFY COLUMN `pokemon_id` SMALLINT NOT NULL,' + 'MODIFY COLUMN `individual_attack` SMALLINT ' + 'NULL DEFAULT NULL,' + 'MODIFY COLUMN `individual_defense` SMALLINT ' + 'NULL DEFAULT NULL,' + 'MODIFY COLUMN `individual_stamina` SMALLINT ' + 'NULL DEFAULT NULL,' + 'MODIFY COLUMN `move_1` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `move_2` SMALLINT NULL DEFAULT NULL;' + ) + db.execute_sql( + 'ALTER TABLE `gym` ' + 'MODIFY COLUMN `team_id` SMALLINT NOT NULL,' + 'MODIFY COLUMN `guard_pokemon_id` SMALLINT NOT NULL;' + ) + db.execute_sql( + 'ALTER TABLE `scannedlocation` ' + 'MODIFY COLUMN `band1` SMALLINT NOT NULL,' + 'MODIFY COLUMN `band2` SMALLINT NOT NULL,' + 'MODIFY COLUMN `band3` SMALLINT NOT NULL,' + 'MODIFY COLUMN `band4` SMALLINT NOT NULL,' + 'MODIFY COLUMN `band5` SMALLINT NOT NULL,' + 'MODIFY COLUMN `midpoint` SMALLINT NOT NULL,' + 
'MODIFY COLUMN `width` SMALLINT NOT NULL;' + ) + db.execute_sql( + 'ALTER TABLE `spawnpoint` ' + 'MODIFY COLUMN `latest_seen` SMALLINT NOT NULL,' + 'MODIFY COLUMN `earliest_unseen` SMALLINT NOT NULL;' + ) + db.execute_sql( + 'ALTER TABLE `spawnpointdetectiondata` ' + 'MODIFY COLUMN `tth_secs` SMALLINT NULL DEFAULT NULL;' + ) + db.execute_sql( + 'ALTER TABLE `versions` ' + 'MODIFY COLUMN `val` SMALLINT NOT NULL;' + ) + db.execute_sql( + 'ALTER TABLE `gympokemon` ' + 'MODIFY COLUMN `pokemon_id` SMALLINT NOT NULL,' + 'MODIFY COLUMN `cp` SMALLINT NOT NULL,' + 'MODIFY COLUMN `num_upgrades` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `move_1` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `move_2` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `stamina` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `stamina_max` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `iv_defense` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `iv_stamina` SMALLINT NULL DEFAULT NULL,' + 'MODIFY COLUMN `iv_attack` SMALLINT NULL DEFAULT NULL;' + ) + db.execute_sql( + 'ALTER TABLE `trainer` ' + 'MODIFY COLUMN `team` SMALLINT NOT NULL,' + 'MODIFY COLUMN `level` SMALLINT NOT NULL;' + ) # add some missing indexes migrate( @@ -3356,6 +3304,7 @@ def database_migrate(db, old_ver): SmallIntegerField(null=False, default=0))) if old_ver < 21: +<<<<<<< HEAD migrate( migrator.add_column('pokemon', 'catch_prob_1', DoubleField(null=True)), @@ -3409,6 +3358,94 @@ def database_migrate(db, old_ver): migrator.add_column('gym', 'shiny', BooleanField(null=True)) ) +======= + # First rename all tables being modified. 
+ db.execute_sql('RENAME TABLE `pokemon` TO `pokemon_old`;') + db.execute_sql( + 'RENAME TABLE `locationaltitude` TO `locationaltitude_old`;') + db.execute_sql( + 'RENAME TABLE `scannedlocation` TO `scannedlocation_old`;') + db.execute_sql('RENAME TABLE `spawnpoint` TO `spawnpoint_old`;') + db.execute_sql('RENAME TABLE `spawnpointdetectiondata` TO ' + + '`spawnpointdetectiondata_old`;') + db.execute_sql('RENAME TABLE `gymmember` TO `gymmember_old`;') + db.execute_sql('RENAME TABLE `gympokemon` TO `gympokemon_old`;') + db.execute_sql( + 'RENAME TABLE `scanspawnpoint` TO `scanspawnpoint_old`;') + # Then create all tables that we renamed with the proper fields. + create_tables(db) + # Insert data back with the correct format + db.execute_sql( + 'INSERT INTO `pokemon` SELECT ' + + 'FROM_BASE64(encounter_id) as encounter_id, ' + + 'CONV(spawnpoint_id, 16,10) as spawnpoint_id, ' + + 'pokemon_id, latitude, longitude, disappear_time, ' + + 'individual_attack, individual_defense, individual_stamina, ' + + 'move_1, move_2, cp, cp_multiplier, weight, height, gender, ' + + 'form, last_modified ' + + 'FROM `pokemon_old`;') + db.execute_sql( + 'INSERT INTO `locationaltitude` SELECT ' + + 'CONV(cellid, 16,10) as cellid, ' + + 'latitude, longitude, last_modified, altitude ' + + 'FROM `locationaltitude_old`;') + db.execute_sql( + 'INSERT INTO `scannedlocation` SELECT ' + + 'CONV(cellid, 16,10) as cellid, ' + + 'latitude, longitude, last_modified, done, band1, band2, band3, ' + + 'band4, band5, midpoint, width ' + + 'FROM `scannedlocation_old`;') + db.execute_sql( + 'INSERT INTO `spawnpoint` SELECT ' + + 'CONV(id, 16,10) as id, ' + + 'latitude, longitude, last_scanned, kind, links, missed_count, ' + + 'latest_seen, earliest_unseen ' + + 'FROM `spawnpoint_old`;') + db.execute_sql( + 'INSERT INTO `spawnpointdetectiondata` ' + + '(encounter_id, spawnpoint_id, scan_time, tth_secs) SELECT ' + + 'FROM_BASE64(encounter_id) as encounter_id, ' + + 'CONV(spawnpoint_id, 16,10) as 
spawnpoint_id, ' + + 'scan_time, tth_secs ' + + 'FROM `spawnpointdetectiondata_old`;') + # A simple alter table does not work ¯\_(ツ)_/¯ + db.execute_sql( + 'INSERT INTO `gymmember` SELECT * FROM `gymmember_old`;') + db.execute_sql( + 'INSERT INTO `gympokemon` SELECT * FROM `gympokemon_old`;') + db.execute_sql( + 'INSERT INTO `scanspawnpoint` SELECT ' + + 'CONV(scannedlocation_id, 16,10) as scannedlocation_id, ' + + 'CONV(spawnpoint_id, 16,10) as spawnpoint_id ' + + 'FROM `scanspawnpoint_old`;') + db.execute_sql( + 'ALTER TABLE `pokestop` MODIFY active_fort_modifier SMALLINT(6);') + # Drop all _old tables + db.execute_sql('DROP TABLE `scanspawnpoint_old`;') + db.execute_sql('DROP TABLE `pokemon_old`;') + db.execute_sql('DROP TABLE `locationaltitude_old`;') + db.execute_sql('DROP TABLE `spawnpointdetectiondata_old`;') + db.execute_sql('DROP TABLE `scannedlocation_old`;') + db.execute_sql('DROP TABLE `spawnpoint_old`;') + db.execute_sql('DROP TABLE `gymmember_old`;') + db.execute_sql('DROP TABLE `gympokemon_old`;') + + if old_ver < 22: + # Drop and add CONSTRAINT_2 with the <= fix. + db.execute_sql('ALTER TABLE `spawnpoint` ' + 'DROP CONSTRAINT CONSTRAINT_2;') + db.execute_sql('ALTER TABLE `spawnpoint` ' + 'ADD CONSTRAINT CONSTRAINT_2 ' + + 'CHECK (`earliest_unseen` <= 3600);') + + # Drop and add CONSTRAINT_4 with the <= fix. + db.execute_sql('ALTER TABLE `spawnpoint` ' + 'DROP CONSTRAINT CONSTRAINT_4;') + db.execute_sql('ALTER TABLE `spawnpoint` ' + 'ADD CONSTRAINT CONSTRAINT_4 CHECK ' + + '(`latest_seen` <= 3600);') +>>>>>>> origin/develop # Always log that we're done. 
log.info('Schema upgrade complete.') + return True diff --git a/pogom/proxy.py b/pogom/proxy.py index 2f0ecef5b2..0e0e22e6d3 100644 --- a/pogom/proxy.py +++ b/pogom/proxy.py @@ -6,6 +6,7 @@ import sys import time +from threading import Thread from random import randint from utils import get_async_requests_session @@ -305,6 +306,24 @@ def get_new_proxy(args): return lp, args.proxy[lp] +def initialize_proxies(args): + # Processing proxies if set (load from file, check and overwrite old + # args.proxy with new working list). + args.proxy = load_proxies(args) + + if args.proxy and not args.proxy_skip_check: + args.proxy = check_proxies(args, args.proxy) + + # Run periodical proxy refresh thread. + if (args.proxy_file is not None) and (args.proxy_refresh > 0): + t = Thread(target=proxies_refresher, + name='proxy-refresh', args=(args,)) + t.daemon = True + t.start() + else: + log.info('Periodical proxies refresh disabled.') + + # Background handler for completed proxy check requests. # Currently doesn't do anything. def __proxy_check_completed(sess, resp): diff --git a/pogom/schedulers.py b/pogom/schedulers.py index 15c9f213a2..13abae7a6d 100644 --- a/pogom/schedulers.py +++ b/pogom/schedulers.py @@ -47,7 +47,6 @@ import itertools import logging import math -import json import time import sys from timeit import default_timer @@ -64,6 +63,7 @@ from .utils import now, cur_sec, cellid, distance from .altitude import get_altitude from .geofence import Geofences +from .cluster import cluster_spawnpoints log = logging.getLogger(__name__) @@ -352,30 +352,47 @@ def __init__(self, queues, status, args): self.step_distance = 0.070 self.step_limit = args.step_limit - self.locations = False + self.locations = [] + + self.cluster_range = 70 + if self.args.jitter: + self.cluster_range = 65 # Generate locations is called when the locations list is cleared - the # first time it scans or after a location change. def _generate_locations(self): - # Attempt to load spawns from file. 
- if self.args.spawnpoint_scanning != 'nofile': - log.debug('Loading spawn points from json file @ %s', - self.args.spawnpoint_scanning) - try: - with open(self.args.spawnpoint_scanning) as file: - self.locations = json.load(file) - except ValueError as e: - log.error('JSON error: %s; will fallback to database', repr(e)) - except IOError as e: - log.error( - 'Error opening json file: %s; will fallback to database', - repr(e)) - # No locations yet? Try the database! - if not self.locations: - log.debug('Loading spawn points from database') - self.locations = SpawnPoint.get_spawnpoints_in_hex( - self.scan_location, self.args.step_limit) + if not self.locations and not self.args.no_pokemon: + log.debug('Loading spawn points from database.') + + spawns = SpawnPoint.select_in_hex_by_location( + self.scan_location, self.step_limit) + + log.debug('Loaded %s spawn points from database.' % len(spawns)) + + for sp in spawns: + time, disappear_time = SpawnPoint.start_end(sp) + + if time > cur_sec(): + # Hasn't spawn in the current hour. + from_now = time - cur_sec() + appears = now() + from_now + else: + # Won't spawn until next hour. + late_by = cur_sec() - time + appears = now() + 3600 - late_by + + duration = (disappear_time - time) % 3600 + leaves = appears + duration + + self.locations.append({ + 'spawnpoint_id': sp['id'], + 'lat': sp['latitude'], + 'lng': sp['longitude'], + 'time': time, + 'appears': appears, + 'leaves': leaves + }) # Geofence spawnpoints. if self.geofences.is_enabled(): @@ -387,57 +404,17 @@ def _generate_locations(self): sys.exit() # Well shit... 
- # if not self.locations: - # raise Exception('No availabe spawn points!') - - # locations[]: - # {"lat": 37.53079079414139, "lng": -122.28811690874117, - # "spawnpoint_id": "808f9f1601d", "time": 511 - - log.info('Total of %d spawns to track', len(self.locations)) - - # locations.sort(key=itemgetter('time')) - - if self.args.verbose: - for i in self.locations: - sec = i['time'] % 60 - minute = (i['time'] / 60) % 60 - m = 'Scan [{:02}:{:02}] ({}) @ {},{}'.format( - minute, sec, i['time'], i['lat'], i['lng']) - log.debug(m) + if not self.locations: + raise Exception('No available spawn points!') - # 'time' from json and db alike has been munged to appearance time as - # seconds after the hour. - # Here we'll convert that to a real timestamp. - for location in self.locations: - # For a scan which should cover all CURRENT pokemon, we can offset - # the comparison time by 15 minutes so that the "appears" time - # won't be rolled over to the next hour. - - # TODO: Make it work. The original logic (commented out) was - # producing bogus results if your first scan was in the last - # 15 minute of the hour. Wrapping my head around this isn't - # work right now, so I'll just drop the feature for the time - # being. It does need to come back so that - # repositioning/pausing works more nicely, but we can live - # without it too. - - # if sps_scan_current: - # cursec = (location['time'] + 900) % 3600 - # else: - cursec = location['time'] - - if cursec > cur_sec(): - # Hasn't spawn in the current hour. - from_now = location['time'] - cur_sec() - appears = now() + from_now - else: - # Won't spawn till next hour. - late_by = cur_sec() - location['time'] - appears = now() + 3600 - late_by + log.info('Tracking a total of %d spawn points.', len(self.locations)) - location['appears'] = appears - location['leaves'] = appears + 900 + # Cluster spawnpoints. 
+ if self.args.ss_cluster_time > 0: + self.locations = cluster_spawnpoints( + self.locations, self.cluster_range, self.args.ss_cluster_time) + log.info('Compressed spawn points into %d clusters.', + len(self.locations)) # Put the spawn points in order of next appearance time. self.locations.sort(key=itemgetter('appears')) @@ -445,11 +422,10 @@ def _generate_locations(self): # Match expected structure: # locations = [((lat, lng, alt), ts_appears, ts_leaves),...] retset = [] - for step, location in enumerate(self.locations, 1): - altitude = get_altitude(self.args, [location['lat'], - location['lng']]) - retset.append((step, (location['lat'], location['lng'], altitude), - location['appears'], location['leaves'])) + for step, sp in enumerate(self.locations, 1): + altitude = get_altitude(self.args, [sp['lat'], sp['lng']]) + retset.append((step, (sp['lat'], sp['lng'], altitude), + sp['appears'], sp['leaves'])) return retset @@ -457,7 +433,7 @@ def _generate_locations(self): def schedule(self): if not self.scan_location: log.warning( - 'Cannot schedule work until scan location has been set') + 'Cannot schedule work until scan location has been set.') return # SpawnScan needs to calculate the list every time, since the times @@ -471,9 +447,50 @@ def schedule(self): log.debug("Added location {}".format(location)) # Clear the locations list so it gets regenerated next cycle. - self.locations = None + self.locations = [] self.ready = True + def next_item(self, status): + step, step_location, appears, leaves = self.queues[0].get() + + wait = 0 + wait_msg = 'Waiting for item from queue.' 
+ + worker_loc = (status['latitude'], status['longitude']) + if worker_loc[0] and worker_loc[1] and self.args.kph > 0: + now_date = datetime.utcnow() + last_action = status['last_scan_date'] + meters = distance(step_location, worker_loc) + wait = int(max(meters / self.args.kph * 3.6 + - (now_date - last_action).total_seconds(), 0)) + if wait > 0: + wait_msg = 'Moving {}m to step {}, arriving in {}s.'.format( + int(meters), step, wait) + + remain = appears - now() - wait + 10 + messages = { + 'wait': wait_msg, + 'early': 'Early for {:6f},{:6f}; waiting {}s...'.format( + step_location[0], step_location[1], remain), + 'late': 'Too late for location {:6f},{:6f}; skipping.'.format( + step_location[0], step_location[1]), + 'search': 'Searching at {:6f},{:6f},{:6f}.'.format( + step_location[0], step_location[1], step_location[2]), + 'invalid': ('Invalid response at {:6f},{:6f}, ' + + 'abandoning location.').format(step_location[0], + step_location[1]) + } + + if remain < self.args.min_seconds_left: + messages['wait'] = ('Unable to reach {:6f},{:6f}, under the ' + + 'speed limit.').format(step_location[0], + step_location[1]) + # Future improvement: insert the item back into the queue, hoping + # that another worker may reach the scan location in time. 
+ return -1, 0, 0, 0, messages, 0 + + return step, step_location, appears, leaves, messages, wait + # SpeedScan is a complete search method that initially does a spawnpoint # search in each scan location by scanning five two-minute bands within @@ -552,10 +569,8 @@ def location_changed(self, scan_location, db_update_queue): log.info('Doing %s distance calcs to assign spawn points to scans', "{:,}".format(len(spawnpoints) * len(scans))) - scan_spawn_point = {} - ScannedLocation.link_spawn_points(scans, initial, spawnpoints, - self.step_distance, scan_spawn_point, - force=True) + scan_spawn_point = ScannedLocation.link_spawn_points( + scans, initial, spawnpoints, self.step_distance) if len(scan_spawn_point): log.info('%d relations found between the spawn points and steps', len(scan_spawn_point)) @@ -957,7 +972,7 @@ def next_item(self, status): # If we are going to get there before it starts then ignore. loc = item['loc'] - if worker_loc: + if worker_loc and self.args.kph > 0: meters = distance(loc, worker_loc) secs_to_arrival = meters / self.args.kph * 3.6 secs_waited = (now_date - last_action).total_seconds() @@ -965,6 +980,7 @@ def next_item(self, status): else: meters = 0 secs_to_arrival = 0 + if ms + secs_to_arrival < item['start']: count_early += 1 continue @@ -1005,8 +1021,7 @@ def next_item(self, status): min_parked_time_remaining, min_fresh_band_time_remaining) else: - log.debug('Enumerating queue found best location: %s.', - repr(best)) + log.debug('Enumerating queue found best location: %s.', best) loc = best.get('loc', []) step = best.get('step', 0) @@ -1045,8 +1060,9 @@ def next_item(self, status): return -1, 0, 0, 0, messages, 0 meters = distance(loc, worker_loc) if worker_loc else 0 - if (meters > (now_date - last_action).total_seconds() * - self.args.kph / 3.6): + if self.args.kph > 0 and (meters > + (now_date - last_action).total_seconds() + * self.args.kph / 3.6): # Flag item as "parked" by a specific thread, because # we're waiting for it. 
This will avoid all threads "walking" # to the same item. diff --git a/pogom/search.py b/pogom/search.py index 316cc7c338..141875a2d8 100644 --- a/pogom/search.py +++ b/pogom/search.py @@ -43,12 +43,20 @@ from pgoapi.hash_server import HashServer from .models import (parse_map, GymDetails, parse_gyms, MainWorker, WorkerStatus, HashKeys, ScannedLocation) +<<<<<<< HEAD from .utils import now, distance, get_args, clear_dict_response +======= +from .utils import now, distance +>>>>>>> origin/develop from .transform import get_new_coords from .account import AccountSet, get_account, setup_mrmime_account, account_failed, \ account_revive from .captcha import captcha_overseer_thread, handle_captcha from .proxy import get_new_proxy +<<<<<<< HEAD +======= +from .apiRequests import gym_get_info, get_map_objects as gmo +>>>>>>> origin/develop from .transform import jitter_location log = logging.getLogger(__name__) @@ -1069,8 +1077,9 @@ def search_worker_thread(args, account_queue, account_sets, account_failures, status['message'] = messages['wait'] # The next_item will return the value telling us how long - # to sleep. This way the status can be updated - time.sleep(wait) + # to sleep. This way the status can be updated. + if wait > 0: + time.sleep(wait) # Using step as a flag for no valid next location returned. if step == -1: @@ -1120,8 +1129,12 @@ def search_worker_thread(args, account_queue, account_sets, account_failures, # Let the api know where we intend to be for this loop. # Doing this before check_login so it does not also have # to be done when the auth token is refreshed. +<<<<<<< HEAD pgacc.set_position(scan_coords[0], scan_coords[1], scan_coords[2]) +======= + api.set_position(*scan_coords) +>>>>>>> origin/develop if args.hash_key: key = key_scheduler.next() @@ -1144,7 +1157,11 @@ def search_worker_thread(args, account_queue, account_sets, account_failures, # Make the actual request. 
scan_date = datetime.utcnow() +<<<<<<< HEAD response_dict = pgacc.req_get_map_objects() +======= + response_dict = gmo(api, account, scan_coords) +>>>>>>> origin/develop status['last_scan_date'] = datetime.utcnow() # Record the time and the place that the worker made the @@ -1168,12 +1185,20 @@ def search_worker_thread(args, account_queue, account_sets, account_failures, captcha = handle_captcha(args, status, pgacc, account, account_failures, account_captchas, whq, +<<<<<<< HEAD scan_coords) +======= + response_dict, scan_coords) +>>>>>>> origin/develop if captcha is not None and captcha: # Make another request for the same location # since the previous one was captcha'd. scan_date = datetime.utcnow() +<<<<<<< HEAD response_dict = pgacc.req_get_map_objects() +======= + response_dict = gmo(api, account, scan_coords) +>>>>>>> origin/develop elif captcha is not None: account_queue.task_done() time.sleep(3) @@ -1181,7 +1206,11 @@ def search_worker_thread(args, account_queue, account_sets, account_failures, parsed = parse_map(args, response_dict, scan_coords, scan_location, dbq, whq, key_scheduler, +<<<<<<< HEAD pgacc, status, scan_date, account, +======= + api, status, scan_date, account, +>>>>>>> origin/develop account_sets) scheduler.task_done(status, parsed) @@ -1287,7 +1316,12 @@ def search_worker_thread(args, account_queue, account_sets, account_failures, gym['latitude'], gym['longitude'] ])) +<<<<<<< HEAD response = gym_request(pgacc, scan_coords, gym) +======= + response = gym_get_info(api, account, + scan_coords, gym) +>>>>>>> origin/develop # Make sure the gym was in range. 
(Sometimes the # API gets cranky about gyms that are ALMOST 1km diff --git a/pogom/transform.py b/pogom/transform.py index ed8a4ad80e..53aad5b8e7 100644 --- a/pogom/transform.py +++ b/pogom/transform.py @@ -100,3 +100,43 @@ def jitter_location(location=None, max_meters=5): distance = math.sqrt(random.random()) * (float(max_meters)) destination = fast_get_new_coords(origin, distance, bearing) return (destination[0], destination[1], location[2]) + + +# Computes the intermediate point at any fraction along the great circle path. +def intermediate_point(pos1, pos2, fraction): + if pos1 == pos2: + return pos1 + + lat1 = math.radians(pos1[0]) + lon1 = math.radians(pos1[1]) + lat2 = math.radians(pos2[0]) + lon2 = math.radians(pos2[1]) + + # Spherical Law of Cosines. + slc = (math.sin(lat1) * math.sin(lat2) + + math.cos(lat1) * math.cos(lat2) * math.cos(lon2 - lon1)) + + if slc > 1: + # Locations are too close to each other. + return pos1 if fraction < 0.5 else pos2 + + delta = math.acos(slc) + + if delta == 0: + # Locations are too close to each other. + return pos1 if fraction < 0.5 else pos2 + + # Intermediate point. 
+ a = math.sin((1 - fraction) * delta) / delta + b = math.sin(fraction * delta) / delta + x = (a * math.cos(lat1) * math.cos(lon1) + + b * math.cos(lat2) * math.cos(lon2)) + y = (a * math.cos(lat1) * math.sin(lon1) + + b * math.cos(lat2) * math.sin(lon2)) + z = a * math.sin(lat1) + b * math.sin(lat2) + + lat3 = math.atan2(z, math.sqrt(x**2 + y**2)) + lon3 = math.atan2(y, x) + + return (((math.degrees(lat3) + 540) % 360) - 180, + ((math.degrees(lon3) + 540) % 360) - 180) diff --git a/pogom/utils.py b/pogom/utils.py index 0ea0f2e786..214447b339 100644 --- a/pogom/utils.py +++ b/pogom/utils.py @@ -2,11 +2,14 @@ # -*- coding: utf-8 -*- import sys +<<<<<<< HEAD import urllib import urlparse from threading import Thread import configargparse +======= +>>>>>>> origin/develop import os import json import logging @@ -18,6 +21,7 @@ import psutil import subprocess import requests +import configargparse from s2sphere import CellId, LatLng from geopy.geocoders import GoogleV3 @@ -295,8 +299,6 @@ def get_args(): action='store_true', default=False) parser.add_argument('-C', '--cors', help='Enable CORS on web server.', action='store_true', default=False) - parser.add_argument('-D', '--db', help='Database filename for SQLite.', - default='pogom.db') parser.add_argument('-cd', '--clear-db', help=('Deletes the existing database before ' + 'starting the Webserver.'), @@ -321,7 +323,11 @@ def get_args(): help=('Use spawnpoint scanning (instead of hex ' + 'grid). 
Scans in a circle based on step_limit ' + 'when on DB.'), - nargs='?', const='nofile', default=False) + action='store_true', default=False) + parser.add_argument('-ssct', '--ss-cluster-time', + help=('Time threshold in seconds for spawn point ' + + 'clustering (0 to disable).'), + type=int, default=0) parser.add_argument('-speed', '--speed-scan', help=('Use speed scanning to identify spawn points ' + 'and then scan closest spawns.'), @@ -334,20 +340,17 @@ def get_args(): type=int, default=20) parser.add_argument('-kph', '--kph', help=('Set a maximum speed in km/hour for scanner ' + - 'movement.'), + 'movement. 0 to disable. Default: 35.'), type=int, default=35) parser.add_argument('-hkph', '--hlvl-kph', help=('Set a maximum speed in km/hour for scanner ' + - 'movement, for high-level (L30) accounts.'), + 'movement, for high-level (L30) accounts. ' + + '0 to disable. Default: 25.'), type=int, default=25) parser.add_argument('-ldur', '--lure-duration', help=('Change duration for lures set on pokestops. 
' + 'This is useful for events that extend lure ' + 'duration.'), type=int, default=30) - parser.add_argument('--dump-spawnpoints', - help=('Dump the spawnpoints from the db to json ' + - '(only for use with -ss).'), - action='store_true', default=False) parser.add_argument('-pd', '--purge-data', help=('Clear Pokemon from database this many hours ' + 'after they disappear (0 to disable).'), @@ -388,22 +391,32 @@ def get_args(): help=('Enable proxy rotation with account changing ' + 'for search threads (none/round/random).'), type=str, default='round') - parser.add_argument('--db-type', - help='Type of database to be used (default: sqlite).', - default='sqlite') - parser.add_argument('--db-name', help='Name of the database to be used.') - parser.add_argument('--db-user', help='Username for the database.') - parser.add_argument('--db-pass', help='Password for the database.') - parser.add_argument('--db-host', help='IP or hostname for the database.') - parser.add_argument( + group = parser.add_argument_group('Database') + group.add_argument( + '--db-name', help='Name of the database to be used.', required=True) + group.add_argument( + '--db-user', help='Username for the database.', required=True) + group.add_argument( + '--db-pass', help='Password for the database.', required=True) + group.add_argument( + '--db-host', + help='IP or hostname for the database.', + default='127.0.0.1') + group.add_argument( '--db-port', help='Port for the database.', type=int, default=3306) - parser.add_argument('--db-threads', - help=('Number of db threads; increase if the db ' + - 'queue falls behind.'), - type=int, default=1) - parser.add_argument('-wh', '--webhook', - help='Define URL(s) to POST webhook information to.', - default=None, dest='webhooks', action='append') + group.add_argument( + '--db-threads', + help=('Number of db threads; increase if the db ' + + 'queue falls behind.'), + type=int, + default=1) + parser.add_argument( + '-wh', + '--webhook', + help='Define URL(s) to 
POST webhook information to.', + default=None, + dest='webhooks', + action='append') parser.add_argument('-gi', '--gym-info', help=('Get all details about gyms (causes an ' + 'additional API hit for every gym).'), @@ -430,8 +443,13 @@ def get_args(): help=('Number of times to retry sending webhook ' + 'data on failure.'), type=int, default=3) - parser.add_argument('-wht', '--wh-timeout', - help='Timeout (in seconds) for webhook requests.', + parser.add_argument('-whct', '--wh-connect-timeout', + help=('Connect timeout (in seconds) for webhook' + + ' requests.'), + type=float, default=1.0) + parser.add_argument('-whrt', '--wh-read-timeout', + help=('Read timeout (in seconds) for webhook' + + 'requests.'), type=float, default=1.0) parser.add_argument('-whbf', '--wh-backoff-factor', help=('Factor (in seconds) by which the delay ' + @@ -472,9 +490,6 @@ def get_args(): help='Interval to check API version in seconds ' + '(Default: in [60, 300]).', default=random.randint(60, 300)) - parser.add_argument('-el', '--encrypt-lib', - help=('Path to encrypt lib to be used instead of ' + - 'the shipped ones.')) parser.add_argument('-odt', '--on-demand_timeout', help=('Pause searching while web UI is inactive ' + 'for this timeout (in seconds).'), @@ -487,7 +502,7 @@ def get_args(): help=('Enables the use of X-FORWARDED-FOR headers ' + 'to identify the IP of clients connecting ' + 'through these trusted proxies.')) - parser.add_argument('--api-version', default='0.87.5', + parser.add_argument('--api-version', default='0.89.1', help=('API version currently in use.')) parser.add_argument('--no-file-logs', help=('Disable logging to files. ' + @@ -713,6 +728,7 @@ def get_args(): # Make the accounts list. 
args.accounts = [] args.accounts_L30 = [] +<<<<<<< HEAD if args.pgpool_url: # Request initial number of workers from PGPool args.pgpool_initial_accounts = pgpool_request_accounts(args, initial=True) @@ -763,6 +779,38 @@ def get_args(): } args.accounts_L30.append(hlvl_account) +======= + + if args.high_lvl_accounts: + # Context processor. + with open(args.high_lvl_accounts, 'r') as accs: + for line in accs: + # Make sure it's not an empty line. + if not line.strip(): + continue + + line = line.split(',') + + # We need "service, username, password". + if len(line) < 3: + raise Exception('L30 account is missing a' + + ' field. Each line requires: ' + + '"service,user,pass".') + + # Let's remove trailing whitespace. + service = line[0].strip() + username = line[1].strip() + password = line[2].strip() + + hlvl_account = { + 'auth_service': service, + 'username': username, + 'password': password, + 'captcha': False + } + + args.accounts_L30.append(hlvl_account) +>>>>>>> origin/develop # Prepare the IV/CP scanning filters. args.enc_whitelist = [] @@ -934,7 +982,9 @@ def clock_between(start, test, end): # Return the s2sphere cellid token from a location. def cellid(loc): - return CellId.from_lat_lng(LatLng.from_degrees(loc[0], loc[1])).to_token() + return int( + CellId.from_lat_lng(LatLng.from_degrees(loc[0], loc[1])).to_token(), + 16) # Return approximate distance in meters. @@ -972,6 +1022,38 @@ def i8ln(word): return word +# Thread function for periodical enc list updating. +def dynamic_loading_refresher(file_list): + # We're on a 60-second timer. + refresh_time_sec = 60 + + while True: + # Wait (x-1) seconds before refresh, min. 1s. + time.sleep(max(1, refresh_time_sec - 1)) + + for arg_type, filename in file_list.items(): + try: + # IV/CP scanning. + if filename: + # Only refresh if the file has changed. 
+ current_time_sec = time.time() + file_modified_time_sec = os.path.getmtime(filename) + time_diff_sec = current_time_sec - file_modified_time_sec + + # File has changed in the last refresh_time_sec seconds. + if time_diff_sec < refresh_time_sec: + args = get_args() + with open(filename) as f: + new_list = frozenset([int(l.strip()) for l in f]) + setattr(args, arg_type, new_list) + log.info('New %s is: %s.', arg_type, new_list) + else: + log.debug('No change found in %s.', filename) + except Exception as e: + log.exception('Exception occurred while' + + ' updating %s: %s.', arg_type, e) + + def get_pokemon_data(pokemon_id): if not hasattr(get_pokemon_data, 'pokemon'): args = get_args() @@ -1046,6 +1128,7 @@ def dottedQuadToNum(ip): return struct.unpack("!L", socket.inet_aton(ip))[0] +<<<<<<< HEAD def clear_dict_response(response): responses = [ 'GET_HATCHED_EGGS', 'GET_INVENTORY', 'CHECK_AWARDED_BADGES', @@ -1055,6 +1138,67 @@ def clear_dict_response(response): if item in response: del response[item] return response +======= +# Generate random device info. +# Original by Noctem. +IPHONES = {'iPhone6,1': 'N51AP', + 'iPhone6,2': 'N53AP', + 'iPhone7,1': 'N56AP', + 'iPhone7,2': 'N61AP', + 'iPhone8,1': 'N71AP', + 'iPhone8,2': 'N66AP', + 'iPhone8,4': 'N69AP', + 'iPhone9,1': 'D10AP', + 'iPhone9,2': 'D11AP', + 'iPhone9,3': 'D101AP', + 'iPhone9,4': 'D111AP', + 'iPhone10,1': 'D20AP', + 'iPhone10,2': 'D21AP', + 'iPhone10,3': 'D22AP', + 'iPhone10,4': 'D201AP', + 'iPhone10,5': 'D211AP', + 'iPhone10,6': 'D221AP'} + + +def generate_device_info(identifier): + md5 = hashlib.md5() + md5.update(identifier) + pick_hash = int(md5.hexdigest(), 16) + + device_info = {'device_brand': 'Apple', 'device_model': 'iPhone', + 'hardware_manufacturer': 'Apple', + 'firmware_brand': 'iPhone OS'} + devices = tuple(IPHONES.keys()) + + ios9 = ('9.0', '9.0.1', '9.0.2', '9.1', '9.2', '9.2.1', '9.3', '9.3.1', + '9.3.2', '9.3.3', '9.3.4', '9.3.5') + # 10.0 was only for iPhone 7 and 7 Plus, and is rare. 
+ ios10 = ('10.0.1', '10.0.2', '10.0.3', '10.1', '10.1.1', '10.2', '10.2.1', + '10.3', '10.3.1', '10.3.2', '10.3.3') + ios11 = ('11.0.1', '11.0.2', '11.0.3', '11.1', '11.1.1', '11.1.2') + + device_pick = devices[pick_hash % len(devices)] + device_info['device_model_boot'] = device_pick + device_info['hardware_model'] = IPHONES[device_pick] + device_info['device_id'] = md5.hexdigest() + + if device_pick in ('iPhone10,1', 'iPhone10,2', 'iPhone10,3', + 'iPhone10,4', 'iPhone10,5', 'iPhone10,6'): + # iPhone 8/8+ and X started on 11. + ios_pool = ios11 + elif device_pick in ('iPhone9,1', 'iPhone9,2', 'iPhone9,3', 'iPhone9,4'): + # iPhone 7/7+ started on 10. + ios_pool = ios10 + ios11 + elif device_pick == 'iPhone8,4': + # iPhone SE started on 9.3. + ios_pool = ('9.3', '9.3.1', '9.3.2', '9.3.3', '9.3.4', '9.3.5') \ + + ios10 + ios11 + else: + ios_pool = ios9 + ios10 + ios11 + + device_info['firmware_type'] = ios_pool[pick_hash % len(ios_pool)] + return device_info +>>>>>>> origin/develop def calc_pokemon_level(cp_multiplier): diff --git a/pogom/webhook.py b/pogom/webhook.py index f2202b4dfd..d8724ddeca 100644 --- a/pogom/webhook.py +++ b/pogom/webhook.py @@ -22,14 +22,15 @@ def send_to_webhooks(args, session, message_frame): log.critical('Called send_to_webhook() without webhooks.') return - req_timeout = args.wh_timeout + connect_timeout = args.wh_connect_timeout + read_timeout = args.wh_read_timeout for w in args.webhooks: try: # Disable keep-alive and set streaming to True, so we can skip # the response content. 
future = session.post(w, json=message_frame, - timeout=(None, req_timeout), + timeout=(connect_timeout, read_timeout), background_callback=__wh_request_completed, headers={'Connection': 'close'}, stream=True) @@ -187,7 +188,7 @@ def __wh_future_completed(future): exc = future.exception(timeout=0) if exc: - log.exception("Something's wrong with your webhook: %s.", exc) + log.warning("Something's wrong with your webhook: %s.", exc) except Exception as ex: log.exception('Unexpected exception in exception info: %s.', ex) diff --git a/requirements.txt b/requirements.txt index 60dc73c638..375ef9c93b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,7 +14,7 @@ PyMySQL==0.7.5 flask-cors==2.1.2 flask-compress==1.3.0 LatLon==1.0.1 -git+https://github.com/michikrug/pgoapi.git@update-ptc-login#egg=pgoapi +git+https://github.com/pogodevorg/pgoapi.git@299dd7cdca75ba44e0fb435ca7e813d8fab80717#egg=pgoapi git+https://github.com/dikkedeur/MrMime.git#egg=MrMime xxhash sphinx==1.4.5 diff --git a/runserver.py b/runserver.py index cc256abbac..b7eb85034c 100755 --- a/runserver.py +++ b/runserver.py @@ -7,7 +7,6 @@ import time import re import ssl -import json import requests from distutils.version import StrictVersion @@ -20,16 +19,22 @@ from flask_cache_bust import init_cache_busting from pogom.app import Pogom +<<<<<<< HEAD from pogom.utils import (get_args, now, gmaps_reverse_geolocate, init_args, log_resource_usage_loop, get_debug_dump_link) +======= +from pogom.utils import (get_args, now, gmaps_reverse_geolocate, + log_resource_usage_loop, get_debug_dump_link, + dynamic_loading_refresher) +>>>>>>> origin/develop from pogom.altitude import get_gmaps_altitude from pogom.models import (init_database, create_tables, drop_tables, - PlayerLocale, SpawnPoint, db_updater, clean_db_loop, + PlayerLocale, db_updater, clean_db_loop, verify_table_encoding, verify_database_schema) from pogom.webhook import wh_updater -from pogom.proxy import load_proxies, check_proxies, 
proxies_refresher +from pogom.proxy import initialize_proxies from pogom.search import search_overseer_thread from time import strftime @@ -193,7 +198,12 @@ def can_start_scanning(args): api_version_map = { 8302: 8300, 8501: 8500, - 8705: 8700 + 8705: 8700, +<<<<<<< HEAD + 8901: 8900 +======= + 8901: 8900 +>>>>>>> origin/develop } mapped_version_int = api_version_map.get(api_version_int, api_version_int) @@ -208,6 +218,43 @@ def can_start_scanning(args): return True +def startup_db(app, clear_db): + db = init_database(app) + if clear_db: + log.info('Clearing database') + drop_tables(db) + + verify_database_schema(db) + + create_tables(db) + + # Fix encoding on present and future tables. + verify_table_encoding(db) + + if clear_db: + log.info( + 'Drop and recreate is complete. Now remove -cd and restart.') + sys.exit() + return db + + +def extract_coordinates(location): + # Use lat/lng directly if matches such a pattern. + prog = re.compile("^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$") + res = prog.match(location) + if res: + log.debug('Using coordinates from CLI directly') + position = (float(res.group(1)), float(res.group(2)), 0) + else: + log.debug('Looking up coordinates in API') + position = util.get_pos_by_name(location) + + if position is None or not any(position): + log.error("Location not found: '{}'".format(location)) + sys.exit() + return position + + def main(): # Patch threading to make exceptions catchable. install_thread_excepthook() @@ -251,34 +298,29 @@ def main(): # Stop if we're just looking for a debug dump. if args.dump: log.info('Retrieving environment info...') - hastebin = get_debug_dump_link() + hastebin_id = get_debug_dump_link() log.info('Done! Your debug link: https://hastebin.com/%s.txt', - hastebin) + hastebin_id) sys.exit(1) # Let's not forget to run Grunt / Only needed when running with webserver. if not args.no_server and not validate_assets(args): sys.exit(1) - # Use lat/lng directly if matches such a pattern. 
- prog = re.compile("^(\-?\d+\.\d+),?\s?(\-?\d+\.\d+)$") - res = prog.match(args.location) - if res: - log.debug('Using coordinates from CLI directly') - position = (float(res.group(1)), float(res.group(2)), 0) - else: - log.debug('Looking up coordinates in API') - position = util.get_pos_by_name(args.location) + if args.no_version_check and not args.only_server: + log.warning('You are running RocketMap in No Version Check mode. ' + "If you don't know what you're doing, this mode " + 'can have negative consequences, and you will not ' + 'receive support running in NoVC mode. ' + 'You have been warned.') - if position is None or not any(position): - log.error("Location not found: '{}'".format(args.location)) - sys.exit() + position = extract_coordinates(args.location) # Use the latitude and longitude to get the local altitude from Google. (altitude, status) = get_gmaps_altitude(position[0], position[1], args.gmaps_key) if altitude is not None: - log.debug('Local altitude is: %sm', altitude) + log.debug('Local altitude is: %sm.', altitude) position = (position[0], position[1], altitude) else: if status == 'REQUEST_DENIED': @@ -291,17 +333,18 @@ def main(): log.error('Unable to retrieve altitude from Google APIs' + 'setting to 0') - log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt)', + log.info('Parsed location is: %.4f/%.4f/%.4f (lat/lng/alt).', position[0], position[1], position[2]) - if args.no_pokemon: - log.info('Parsing of Pokemon disabled.') - if args.no_pokestops: - log.info('Parsing of Pokestops disabled.') - if args.no_gyms: - log.info('Parsing of Gyms disabled.') - if args.encounter: - log.info('Encountering pokemon enabled.') + # Scanning toggles. 
+ log.info('Parsing of Pokemon %s.', + 'disabled' if args.no_pokemon else 'enabled') + log.info('Parsing of Pokestops %s.', + 'disabled' if args.no_pokestops else 'enabled') + log.info('Parsing of Gyms %s.', + 'disabled' if args.no_gyms else 'enabled') + log.info('Pokemon encounters %s.', + 'enabled' if args.encounter else 'disabled') app = None if not args.no_server and not args.clear_db: @@ -311,25 +354,7 @@ def main(): app.before_request(app.validate_request) app.set_current_location(position) - db = init_database(app) - if args.clear_db: - log.info('Clearing database') - if args.db_type == 'mysql': - drop_tables(db) - elif os.path.isfile(args.db): - os.remove(args.db) - - verify_database_schema(db) - - create_tables(db) - - # Fix encoding on present and future tables. - verify_table_encoding(db) - - if args.clear_db: - log.info( - 'Drop and recreate is complete. Now remove -cd and restart.') - sys.exit() + db = startup_db(app, args.clear_db) # Control the search status (running or not) across threads. control_flags = { @@ -376,7 +401,7 @@ def main(): wh_updates_queue = Queue() wh_key_cache = {} - if len(args.wh_types) == 0: + if not args.wh_types: log.info('Webhook disabled.') else: log.info('Webhook enabled for events: sending %s to %s.', @@ -392,27 +417,49 @@ def main(): t.start() if not args.only_server: + # Speed limit. + log.info('Scanning speed limit %s.', + 'set to {} km/h'.format(args.kph) + if args.kph > 0 else 'disabled') + log.info('High-level speed limit %s.', + 'set to {} km/h'.format(args.hlvl_kph) + if args.hlvl_kph > 0 else 'disabled') + # Check if we are able to scan. if not can_start_scanning(args): sys.exit(1) - # Processing proxies if set (load from file, check and overwrite old - # args.proxy with new working list). - args.proxy = load_proxies(args) + initialize_proxies(args) + + # Monitor files, update data if they've changed recently. + # Keys are 'args' object keys, values are filenames to load. 
+ files_to_monitor = {} - if args.proxy and not args.proxy_skip_check: - args.proxy = check_proxies(args, args.proxy) + if args.encounter: + files_to_monitor['enc_whitelist'] = args.enc_whitelist_file + log.info('Encounters are enabled.') + else: + log.info('Encounters are disabled.') + + if args.webhook_blacklist_file: + files_to_monitor['webhook_blacklist'] = args.webhook_blacklist_file + log.info('Webhook blacklist is enabled.') + elif args.webhook_whitelist_file: + files_to_monitor['webhook_whitelist'] = args.webhook_whitelist_file + log.info('Webhook whitelist is enabled.') + else: + log.info('Webhook whitelist/blacklist is disabled.') - # Run periodical proxy refresh thread. - if (args.proxy_file is not None) and (args.proxy_refresh > 0): - t = Thread(target=proxies_refresher, - name='proxy-refresh', args=(args,)) + if files_to_monitor: + t = Thread(target=dynamic_loading_refresher, + name='dynamic-enclist', args=(files_to_monitor,)) t.daemon = True t.start() + log.info('Dynamic list refresher is enabled.') else: - log.info('Periodical proxies refresh disabled.') + log.info('Dynamic list refresher is disabled.') - # Update player locale if not set correctly, yet. + # Update player locale if not set correctly yet. args.player_locale = PlayerLocale.get_locale(args.location) if not args.player_locale: args.player_locale = gmaps_reverse_geolocate( @@ -431,20 +478,6 @@ def main(): 'Existing player locale has been retrieved from the DB.') # Gather the Pokemon! - - # Attempt to dump the spawn points (do this before starting threads of - # endure the woe). 
- if (args.spawnpoint_scanning and - args.spawnpoint_scanning != 'nofile' and - args.dump_spawnpoints): - with open(args.spawnpoint_scanning, 'w+') as file: - log.info( - 'Saving spawn points to %s', args.spawnpoint_scanning) - spawns = SpawnPoint.get_spawnpoints_in_hex( - position, args.step_limit) - file.write(json.dumps(spawns)) - log.info('Finished exporting spawn points') - argset = (args, new_location_queue, control_flags, heartbeat, db_updates_queue, wh_updates_queue) diff --git a/static/images/bookmarklet.gif b/static/images/bookmarklet.gif new file mode 100644 index 0000000000..31e5bd1ff2 Binary files /dev/null and b/static/images/bookmarklet.gif differ diff --git a/static/js/custom.js.example b/static/js/custom.js.example index ea9e8b8b62..2ed5fb3bde 100644 --- a/static/js/custom.js.example +++ b/static/js/custom.js.example @@ -1,14 +1,21 @@ $(function () { 'use strict' +<<<<<<< HEAD // Marker cluster might have loaded before custom.js. const isMarkerClusterLoaded = typeof window.markerCluster !== 'undefined' && !!window.markerCluster +======= +>>>>>>> origin/develop /* Settings. */ + const showSearchMarker = true // Show a marker on the map's scan location. Default: false. + const isSearchMarkerMovable = false // Let the user move the scan location marker around. Doesn't do anything without --no-fixed-location. Default: false. + const showLocationMarker = true // Show a marker on the visitor's location. Default: false. + const isLocationMarkerMovable = false // Let the user move the visitor marker around. Default: false. const scaleByRarity = true // Enable scaling by rarity. Default: true. const upscalePokemon = false // Enable upscaling of certain Pokemon (upscaledPokemon and notify list). Default: false. - const upscaledPokemon = [] // Add Pokémon IDs separated by commas (e.g. [1, 2, 3]) to upscale icons. + const upscaledPokemon = [] // Add Pokémon IDs separated by commas (e.g. [1, 2, 3]) to upscale icons. Default: []. // Google Analytics property ID. 
Leave empty to disable. // Looks like 'UA-XXXXX-Y'. @@ -33,17 +40,19 @@ $(function () { ] // Clustering! Different zoom levels for desktop vs mobile. - const disableClusters = false // Default: false - const maxClusterZoomLevel = 14 // Default: 14 - const maxClusterZoomLevelMobile = 14 // Default: same as desktop - const clusterZoomOnClick = false // Default: false - const clusterZoomOnClickMobile = false // Default: same as desktop - const clusterGridSize = 60 // Default: 60 - const clusterGridSizeMobile = 60 // Default: same as desktop + const disableClusters = false // Default: false. + const maxClusterZoomLevel = 14 // Default: 14. + const maxClusterZoomLevelMobile = 14 // Default: 14. + const clusterZoomOnClick = false // Default: false. + const clusterZoomOnClickMobile = false // Default: false. + const clusterGridSize = 60 // Default: 60. + const clusterGridSizeMobile = 60 // Default: 60. // Process Pokémon in chunks to improve responsiveness. - const processPokemonChunkSize = 100 // Default: 100 - const processPokemonIntervalMs = 100 // Default: 100ms + const processPokemonChunkSize = 100 // Default: 100. + const processPokemonIntervalMs = 100 // Default: 100ms. + const processPokemonChunkSizeMobile = 100 // Default: 100. + const processPokemonIntervalMsMobile = 100 // Default: 100ms. /* Feature detection. */ @@ -63,7 +72,8 @@ $(function () { /* Do stuff. */ const currentPage = window.location.pathname - + // Marker cluster might have loaded before custom.js. + const isMarkerClusterLoaded = typeof window.markerCluster !== 'undefined' && !!window.markerCluster // Set custom Store values. 
Store.set('maxClusterZoomLevel', maxClusterZoomLevel) @@ -74,11 +84,17 @@ $(function () { Store.set('scaleByRarity', scaleByRarity) Store.set('upscalePokemon', upscalePokemon) Store.set('upscaledPokemon', upscaledPokemon) + Store.set('showSearchMarker', showSearchMarker) + Store.set('isSearchMarkerMovable', isSearchMarkerMovable) + Store.set('showLocationMarker', showLocationMarker) + Store.set('isLocationMarkerMovable', isLocationMarkerMovable) if (typeof window.orientation !== 'undefined' || isMobileDevice()) { Store.set('maxClusterZoomLevel', maxClusterZoomLevelMobile) Store.set('clusterZoomOnClick', clusterZoomOnClickMobile) Store.set('clusterGridSize', clusterGridSizeMobile) + Store.set('processPokemonChunkSize', processPokemonChunkSizeMobile) + Store.set('processPokemonIntervalMs', processPokemonIntervalMsMobile) } if (disableClusters) { diff --git a/static/js/map.common.js b/static/js/map.common.js index 8b0b8364f1..56e12e068d 100644 --- a/static/js/map.common.js +++ b/static/js/map.common.js @@ -866,8 +866,13 @@ var StoreOptions = { default: '', type: StoreTypes.Number }, +<<<<<<< HEAD 'remember_text_level_notify': { default: '', +======= + 'excludedRarity': { + default: 0, // 0: none, 1: <=Common, 2: <=Uncommon, 3: <=Rare, 4: <=Very Rare, 5: <=Ultra Rare +>>>>>>> origin/develop type: StoreTypes.Number }, 'showRaids': { @@ -918,6 +923,10 @@ var StoreOptions = { default: true, type: StoreTypes.Boolean }, + 'showPokemonStats': { + default: true, + type: StoreTypes.Boolean + }, 'showPokestops': { default: true, type: StoreTypes.Boolean @@ -1053,6 +1062,22 @@ var StoreOptions = { 'isBounceDisabled': { default: false, type: StoreTypes.Boolean + }, + 'showLocationMarker': { + default: true, + type: StoreTypes.Boolean + }, + 'isLocationMarkerMovable': { + default: false, + type: StoreTypes.Boolean + }, + 'showSearchMarker': { + default: true, + type: StoreTypes.Boolean + }, + 'isSearchMarkerMovable': { + default: false, + type: StoreTypes.Boolean } } @@ -1207,6 
+1232,21 @@ function updatePokemonMarker(item, map, scaleByRarity = true, isNotifyPkmn = fal marker.setIcon(icon) } +function updatePokemonLabel(item) { + // Only update label when Pokémon has been encountered. + if (item['cp'] !== null && item['cpMultiplier'] !== null) { + item.marker.infoWindow.setContent(pokemonLabel(item)) + } +} + +function updatePokemonLabels(pokemonList) { + $.each(pokemonList, function (key, value) { + var item = pokemonList[key] + + updatePokemonLabel(item) + }) +} + function isTouchDevice() { // Should cover most browsers return 'ontouchstart' in window || navigator.maxTouchPoints diff --git a/static/js/map.js b/static/js/map.js index aced6c144e..4a5d078657 100644 --- a/static/js/map.js +++ b/static/js/map.js @@ -21,6 +21,7 @@ var $selectLuredPokestopsOnly var $selectSearchIconMarker var $selectLocationIconMarker var $switchGymSidebar +var $selectExcludeRarity const language = document.documentElement.lang === '' ? 'en' : document.documentElement.lang var idToPokemon = {} @@ -32,6 +33,8 @@ var searchMarkerStyles var timestamp var excludedPokemon = [] +var excludedPokemonByRarity = [] +var excludedRarity var notifiedPokemon = [] var notifiedRarity = [] var notifiedMinPerfection = null @@ -74,6 +77,7 @@ const cryFileTypes = ['wav', 'mp3'] const genderType = ['♂', '♀', '⚲'] const unownForm = ['unset', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '!', '?'] +<<<<<<< HEAD const weatherImages = { 1: 'weather_sunny.png', @@ -103,6 +107,20 @@ const alertTexts = { 1: 'Moderate', 2: 'Extreme', } +======= +const pokemonWithImages = [ + 2, 3, 5, 6, 8, 9, 11, 28, 31, 34, 38, 59, 62, 65, 68, 71, 73, 76, 82, 89, 91, 94, 103, 105, 110, 112, 123, 125, 126, 129, 131, 134, 135, 136, 137, 139, 143, 144, 145, 146, 150, 153, 156, 159, 243, 244, 245, 248, 249, 250, 302, 303, 320, 359, 382, 383, 384 +] + +const excludedRaritiesList = [ + [], + ['common'], + ['common', 
'uncommon'], + ['common', 'uncommon', 'rare'], + ['common', 'uncommon', 'rare', 'very rare'], + ['common', 'uncommon', 'rare', 'very rare', 'ultra rare'] +] +>>>>>>> origin/develop /* text place holders: @@ -306,8 +324,20 @@ function initMap() { // eslint-disable-line no-unused-vars }, 500) }) - searchMarker = createSearchMarker() - locationMarker = createLocationMarker() + const showSearchMarker = Store.get('showSearchMarker') + const showLocationMarker = Store.get('showLocationMarker') + const isLocationMarkerMovable = Store.get('isLocationMarkerMovable') + + if (showSearchMarker) { + // Whether marker is draggable or not is set in createSearchMarker(). + searchMarker = createSearchMarker() + } + + if (showLocationMarker) { + locationMarker = createLocationMarker() + locationMarker.setDraggable(isLocationMarkerMovable) + } + createMyLocationButton() initSidebar() @@ -327,6 +357,11 @@ function initMap() { // eslint-disable-line no-unused-vars } function updateLocationMarker(style) { + // Don't do anything if it's disabled. + if (!locationMarker) { + return + } + if (style in searchMarkerStyles) { var url = searchMarkerStyles[style].icon if (url) { @@ -340,6 +375,7 @@ function updateLocationMarker(style) { Store.set('locationMarkerStyle', style) } + // Return value is currently unused. return locationMarker } @@ -381,6 +417,13 @@ function createLocationMarker() { function updateSearchMarker(style) { if (style in searchMarkerStyles) { + Store.set('searchMarkerStyle', style) + + // If it's disabled, stop. + if (!searchMarker) { + return + } + var url = searchMarkerStyles[style].icon if (url) { searchMarker.setIcon({ @@ -390,21 +433,21 @@ function updateSearchMarker(style) { } else { searchMarker.setIcon(url) } - Store.set('searchMarkerStyle', style) } return searchMarker } function createSearchMarker() { - var searchMarker = new google.maps.Marker({ // need to keep reference. 
+ const isSearchMarkerMovable = Store.get('isSearchMarkerMovable') + const searchMarker = new google.maps.Marker({ // need to keep reference. position: { lat: centerLat, lng: centerLng }, map: map, animation: google.maps.Animation.DROP, - draggable: !Store.get('lockMarker'), + draggable: !Store.get('lockMarker') && isSearchMarkerMovable, icon: null, optimized: false, zIndex: google.maps.Marker.MAX_ZINDEX + 1 @@ -468,6 +511,7 @@ function initSidebar() { $('#max-level-gyms-filter-switch').val(Store.get('maxGymLevel')) $('#last-update-gyms-switch').val(Store.get('showLastUpdatedGymsOnly')) $('#pokemon-switch').prop('checked', Store.get('showPokemon')) + $('#pokemon-stats-switch').prop('checked', Store.get('showPokemonStats')) $('#pokestops-switch').prop('checked', Store.get('showPokestops')) $('#lured-pokestops-only-switch').val(Store.get('showLuredPokestopsOnly')) $('#lured-pokestops-only-wrapper').toggle(Store.get('showPokestops')) @@ -480,9 +524,13 @@ function initSidebar() { $('#scanned-switch').prop('checked', Store.get('showScanned')) $('#spawnpoints-switch').prop('checked', Store.get('showSpawnpoints')) $('#ranges-switch').prop('checked', Store.get('showRanges')) +<<<<<<< HEAD $('#hideunnotified-switch').prop('checked', Store.get('hideNotNotified')) $('#popups-switch').prop('checked', Store.get('showPopups')) $('#bounce-switch').prop('checked', Store.get('isBounceDisabled')) +======= + $('#notify-perfection-wrapper').toggle(Store.get('showPokemonStats')) +>>>>>>> origin/develop $('#sound-switch').prop('checked', Store.get('playSound')) $('#pokemoncries').toggle(Store.get('playSound')) $('#cries-switch').prop('checked', Store.get('playCries')) @@ -605,6 +653,7 @@ function pokemonLabel(item) { var form = item['form'] var cp = item['cp'] var cpMultiplier = item['cp_multiplier'] +<<<<<<< HEAD var prob1 = item['catch_prob_1'] var prob2 = item['catch_prob_2'] var prob3 = item['catch_prob_3'] @@ -612,6 +661,9 @@ function pokemonLabel(item) { var ratingDefense = 
item['rating_defense'] var encounterIdLong = atob(encounterId) var weather_boosted_condition = item['weather_boosted_condition'] +======= + const showStats = Store.get('showPokemonStats') +>>>>>>> origin/develop $.each(types, function (index, type) { typesDisplay += getTypeSpan(type) @@ -631,6 +683,7 @@ function pokemonLabel(item) { ${name} #${id} ${formString} ${genderType[gender - 1]} ${rarityDisplay} ${typesDisplay} ` +<<<<<<< HEAD var weatherBoost = '' if (weather_boosted_condition) { weatherBoost = `
Boosted by: @@ -665,6 +718,9 @@ function pokemonLabel(item) { var pokemon_icon = get_pokemon_raw_icon_url(item) if (cp !== null && cpMultiplier !== null) { +======= + if (showStats && cp !== null && cpMultiplier !== null) { +>>>>>>> origin/develop var pokemonLevel = getPokemonLevel(cpMultiplier) if (atk !== null && def !== null && sta !== null) { @@ -798,7 +854,6 @@ function gymLabel(gym, includeMembers = true) { const lastScannedStr = getDateStr(gym.last_scanned) const lastModifiedStr = getDateStr(gym.last_modified) const slotsString = gym.slots_available ? (gym.slots_available === 1 ? '1 Free Slot' : `${gym.slots_available} Free Slots`) : 'No Free Slots' - const teamColor = ['85,85,85,1', '0,134,255,1', '255,26,26,1', '255,159,25,1'] const teamName = gymTypes[gym.team_id] const isUpcomingRaid = raid != null && Date.now() < raid.start const isRaidStarted = isOngoingRaid(raid) @@ -813,8 +868,8 @@ function gymLabel(gym, includeMembers = true) { const gymPoints = gym.total_cp const titleText = gym.name ? gym.name : (gym.team_id === 0 ? teamName : 'Team ' + teamName) const title = ` -
- ${titleText} +
+ ${titleText}
` if (gym.team_id !== 0) { @@ -830,8 +885,10 @@ function gymLabel(gym, includeMembers = true) { if ((isUpcomingRaid || isRaidStarted) && isRaidFilterOn && isGymSatisfiesRaidMinMaxFilter(raid)) { const raidColor = ['252,112,176', '255,158,22', '184,165,221'] const levelStr = '★'.repeat(raid['level']) + let raidImage = '' if (isRaidStarted) { +<<<<<<< HEAD // Use Pokémon-specific image. var pokemon_icon = get_pokemon_raw_icon_url(raid) if (raid.pokemon_id !== null) { @@ -839,21 +896,46 @@ function gymLabel(gym, includeMembers = true) {
+======= + // set Pokémon-specific image if we have one. + if (raid.pokemon_id !== null && pokemonWithImages.indexOf(raid.pokemon_id) !== -1) { + raidImage = `` + } else { + raidImage = `` + } + if (raid.pokemon_id === null) { + image = ` + ${raidImage} +
+ + ${levelStr} + + left (${moment(raid.end).format('HH:mm')})
-
-
-
- ${raid['pokemon_name']} #${raid['pokemon_id']} | CP: ${raid['cp']} -
- ${raidStr} -
+ ` + } else { + image = ` +
+
+
+ ${raidImage} +
+
+
+
+
+ ${raid['pokemon_name']} #${raid['pokemon_id']} | CP: ${raid['cp']} +
+ ${raidStr} +
+
+>>>>>>> origin/develop
-
- - ${levelStr} - - left (${moment(raid.end).format('HH:mm')}) + + ${levelStr} + + left (${moment(raid.end).format('HH:mm')})
` } @@ -1120,7 +1202,8 @@ function getNotifyText(item) { var find = ['', '', '', '', ''] var replace = [((iv) ? iv.toFixed(1) : ''), item['pokemon_name'], item['individual_attack'], item['individual_defense'], item['individual_stamina']] - var ntitle = repArray(((iv) ? notifyIvTitle : notifyNoIvTitle), find, replace) + const showStats = Store.get('showPokemonStats') + var ntitle = repArray(((showStats && iv) ? notifyIvTitle : notifyNoIvTitle), find, replace) var dist = moment(item['disappear_time']).format('HH:mm:ss') var until = getTimeUntil(item['disappear_time']) var udist = (until.hour > 0) ? until.hour + ':' : '' @@ -1164,8 +1247,7 @@ function playPokemonSound(pokemonID, cryFileTypes) { } } -function isNotifyPoke(poke) { - const isOnNotifyList = notifiedPokemon.indexOf(poke['pokemon_id']) > -1 || notifiedRarity.indexOf(poke['pokemon_rarity']) > -1 +function isNotifyPerfectionPoke(poke) { var hasHighIV = false var hasHighLevel = false var hasHighAttributes = false @@ -1180,7 +1262,32 @@ function isNotifyPoke(poke) { hasHighAttributes = (hasHighIV && !(notifiedMinLevel > 0)) || (hasHighLevel && !(notifiedMinPerfection > 0)) || hasHighLevel && hasHighIV } +<<<<<<< HEAD return isOnNotifyList || hasHighAttributes +======= + return hasHighIV +} + +function isNotifyPoke(poke) { + const isOnNotifyList = notifiedPokemon.indexOf(poke['pokemon_id']) > -1 || notifiedRarity.indexOf(poke['pokemon_rarity']) > -1 + const isNotifyPerfectionPkmn = isNotifyPerfectionPoke(poke) + const showStats = Store.get('showPokemonStats') + + return isOnNotifyList || (showStats && isNotifyPerfectionPkmn) +} + +function getNotifyPerfectionPokemons(pokemonList) { + var notifyPerfectionPkmn = [] + $.each(pokemonList, function (key, value) { + var item = pokemonList[key] + + if (isNotifyPerfectionPoke(item)) { + notifyPerfectionPkmn.push(item) + } + }) + + return notifyPerfectionPkmn +>>>>>>> origin/develop } function customizePokemonMarker(marker, item, skipNotification) { @@ -1479,12 +1586,34 
@@ function addListeners(marker) { function clearStaleMarkers() { const oldPokeMarkers = [] +<<<<<<< HEAD $.each(mapData.pokemons, function (key, value) { const isPokeExpired = mapData.pokemons[key]['disappear_time'] < Date.now() const isPokeExcluded = getExcludedPokemon().indexOf(mapData.pokemons[key]['pokemon_id']) !== -1 if (isPokeExpired || isPokeExcluded) { const oldMarker = mapData.pokemons[key].marker +======= + $.each(mapData.pokemons, function (key, pokemon) { + const pokemonId = pokemon['pokemon_id'] + const isPokeExpired = pokemon['disappear_time'] < Date.now() + const isPokeExcluded = excludedPokemon.indexOf(pokemonId) !== -1 + // Limit choice to our options [0, 5]. + const excludedRarityOption = Math.min(Math.max(Store.get('excludedRarity'), 0), 5) + const excludedRarity = excludedRaritiesList[excludedRarityOption] + const hasRarity = pokemon.hasOwnProperty('pokemon_rarity') + // Not beautiful code with null as fallback, but it's more readable than a one-liner. + const rarity = hasRarity ? 
pokemon['pokemon_rarity'].toLowerCase() : null + const isRarityExcluded = (hasRarity && excludedRarity.indexOf(rarity) !== -1) + + if (isPokeExpired || isPokeExcluded || isRarityExcluded) { + const oldMarker = pokemon.marker + const isPokeExcludedByRarity = excludedPokemonByRarity.indexOf(pokemonId) !== -1 + + if (isRarityExcluded && !isPokeExcludedByRarity) { + excludedPokemonByRarity.push(pokemonId) + } +>>>>>>> origin/develop if (oldMarker.rangeCircle) { oldMarker.rangeCircle.setMap(null) @@ -1504,18 +1633,25 @@ function clearStaleMarkers() { markerCluster.removeMarkers(oldPokeMarkers, true) +<<<<<<< HEAD $.each(mapData.lurePokemons, function (key, value) { if (mapData.lurePokemons[key]['lure_expiration'] < new Date().getTime() || getExcludedPokemon().indexOf(mapData.lurePokemons[key]['pokemon_id']) >= 0) { mapData.lurePokemons[key].marker.setMap(null) +======= + $.each(mapData.lurePokemons, function (key, lurePokemon) { + if (lurePokemon['lure_expiration'] < new Date().getTime() || + excludedPokemon.indexOf(lurePokemon['pokemon_id']) >= 0) { + lurePokemon.marker.setMap(null) +>>>>>>> origin/develop delete mapData.lurePokemons[key] } }) - $.each(mapData.scanned, function (key, value) { + $.each(mapData.scanned, function (key, scanned) { // If older than 15mins remove - if (mapData.scanned[key]['last_modified'] < (new Date().getTime() - 15 * 60 * 1000)) { - mapData.scanned[key].marker.setMap(null) + if (scanned['last_modified'] < (new Date().getTime() - 15 * 60 * 1000)) { + scanned.marker.setMap(null) delete mapData.scanned[key] } }) @@ -1731,17 +1867,35 @@ function processPokemonChunked(pokemon, chunkSize) { } function processPokemon(item) { +<<<<<<< HEAD const isExcludedPoke = getExcludedPokemon().indexOf(item['pokemon_id']) !== -1 +======= + const isPokeExcluded = excludedPokemon.indexOf(item['pokemon_id']) !== -1 +>>>>>>> origin/develop const isPokeAlive = item['disappear_time'] > Date.now() + // Limit choice to our options [0, 5]. 
+ const excludedRarityOption = Math.min(Math.max(Store.get('excludedRarity'), 0), 5) + const excludedRarity = excludedRaritiesList[excludedRarityOption] + const hasRarity = item.hasOwnProperty('pokemon_rarity') + // Not beautiful code with null as fallback, but it's more readable than a one-liner. + const rarity = hasRarity ? item['pokemon_rarity'].toLowerCase() : null + const isRarityExcluded = (hasRarity && excludedRarity.indexOf(rarity) !== -1) + const isPokeExcludedByRarity = excludedPokemonByRarity.indexOf(item['pokemon_id']) !== -1 var oldMarker = null var newMarker = null if (!(item['encounter_id'] in mapData.pokemons) && +<<<<<<< HEAD !isExcludedPoke && isPokeAlive) { // Add marker to map and item to dict. const isNotifyPkmn = isNotifyPoke(item) if (!item.hidden && (!Store.get('hideNotNotified') || isNotifyPkmn)) { +======= + !isPokeExcluded && !isRarityExcluded && isPokeAlive) { + // Add marker to map and item to dict. + if (!item.hidden) { +>>>>>>> origin/develop const isBounceDisabled = Store.get('isBounceDisabled') const scaleByRarity = Store.get('scaleByRarity') @@ -1757,6 +1911,8 @@ function processPokemon(item) { } else { oldMarker = item.marker } + } else if (isRarityExcluded && !isPokeExcludedByRarity) { + excludedPokemonByRarity.push(item['pokemon_id']) } return [newMarker, oldMarker] @@ -2175,7 +2331,11 @@ function centerMapOnLocation() { if (navigator.geolocation) { navigator.geolocation.getCurrentPosition(function (position) { var latlng = new google.maps.LatLng(position.coords.latitude, position.coords.longitude) - locationMarker.setPosition(latlng) + + if (locationMarker) { + locationMarker.setPosition(latlng) + } + map.setCenter(latlng) Store.set('followMyLocationPosition', { lat: position.coords.latitude, @@ -2194,7 +2354,10 @@ function changeLocation(lat, lng) { var loc = new google.maps.LatLng(lat, lng) changeSearchLocation(lat, lng).done(function () { map.setCenter(loc) - searchMarker.setPosition(loc) + + if (searchMarker) { + 
searchMarker.setPosition(loc) + } }) } @@ -2244,7 +2407,7 @@ function updateGeoLocation() { var center = new google.maps.LatLng(lat, lng) if (Store.get('geoLocate')) { - // the search function makes any small movements cause a loop. Need to increase resolution + // The search function makes any small movements cause a loop. Need to increase resolution. if ((typeof searchMarker !== 'undefined') && (getPointDistance(searchMarker.getPosition(), center) > 40)) { $.post('next_loc?lat=' + lat + '&lon=' + lng).done(function () { map.panTo(center) @@ -2365,20 +2528,48 @@ function showGymDetails(id) { // eslint-disable-line no-unused-vars function getSidebarGymMember(pokemon) { var perfectPercent = getIv(pokemon.iv_attack, pokemon.iv_defense, pokemon.iv_stamina) var moveEnergy = Math.round(100 / pokemon.move_2_energy) + const motivationZone = ['Good', 'Average', 'Bad'] + const motivationPercentage = (pokemon.cp_decayed / pokemon.pokemon_cp) * 100 + var colorIdx = 0 + if (motivationPercentage <= 46.66) { + colorIdx = 2 + } else if ((motivationPercentage > 46.66) && (motivationPercentage < 73.33)) { + colorIdx = 1 + } + // Skip getDateStr() so we can re-use the moment.js object. + var relativeTime = 'Unknown' + var absoluteTime = '' + +<<<<<<< HEAD var pokemon_image = get_pokemon_raw_icon_url(pokemon) +======= + if (pokemon.deployment_time) { + let deploymentTime = moment(pokemon.deployment_time) + relativeTime = deploymentTime.fromNow() + // Append as string so we show nothing when the time is Unknown. + absoluteTime = '
(' + deploymentTime.format('MMM Do HH:mm') + ')
' + } + +>>>>>>> origin/develop return ` -
${pokemon.pokemon_name}
-
${pokemon.cp_decayed}
+
${pokemon.pokemon_name}
+
+ ${pokemon.cp_decayed} +
+
+ Max: ${pokemon.pokemon_cp} +
-
${pokemon.trainer_name} (${pokemon.trainer_level})
-
Deployed ${getDateStr(pokemon.deployment_time)}
+
${pokemon.trainer_name} (${pokemon.trainer_level})
+
Deployed ${relativeTime}
+ ${absoluteTime} @@ -2677,6 +2868,18 @@ $(function () { updateMap() }) + $selectExcludeRarity = $('#exclude-rarity') + + $selectExcludeRarity.select2({ + placeholder: 'None', + minimumResultsForSearch: Infinity + }) + + $selectExcludeRarity.on('change', function () { + Store.set('excludedRarity', this.value) + updateMap() + }) + $selectSearchIconMarker = $('#iconmarker-style') $selectLocationIconMarker = $('#locationmarker-style') @@ -2749,6 +2952,7 @@ $(function () { }) $selectExclude = $('#exclude-pokemon') + $selectExcludeRarity = $('#exclude-rarity') $selectPokemonNotify = $('#notify-pokemon') $selectRarityNotify = $('#notify-rarity') $textPerfectionNotify = $('#notify-perfection') @@ -2808,6 +3012,13 @@ $(function () { clearStaleMarkers() Store.set('remember_select_exclude', excludedPokemon) }) + $selectExcludeRarity.on('change', function (e) { + excludedRarity = $selectExcludeRarity.val() + reincludedPokemon = reincludedPokemon.concat(excludedPokemonByRarity) + excludedPokemonByRarity = [] + clearStaleMarkers() + Store.set('excludedRarity', excludedRarity) + }) $selectPokemonNotify.on('change', function (e) { notifiedPokemon = $selectPokemonNotify.val().map(Number) Store.set('remember_select_notify', notifiedPokemon) @@ -2841,6 +3052,7 @@ $(function () { // recall saved lists $selectExclude.val(Store.get('remember_select_exclude')).trigger('change') + $selectExcludeRarity.val(Store.get('excludedRarity')).trigger('change') $selectPokemonNotify.val(Store.get('remember_select_notify')).trigger('change') $selectRarityNotify.val(Store.get('remember_select_rarity_notify')).trigger('change') $textPerfectionNotify.val(Store.get('remember_text_perfection_notify')).trigger('change') @@ -2985,6 +3197,24 @@ $(function () { buildSwitchChangeListener(mapData, ['pokemons'], 'showPokemon').bind(this)() markerCluster.repaint() }) + $('#pokemon-stats-switch').change(function () { + Store.set('showPokemonStats', this.checked) + var options = { + 'duration': 500 + } + const 
$wrapper = $('#notify-perfection-wrapper') + if (this.checked) { + $wrapper.show(options) + } else { + $wrapper.hide(options) + } + updatePokemonLabels(mapData.pokemons) + // Only redraw Pokémon which are notified of perfection. + var notifyPerfectionPkmn = getNotifyPerfectionPokemons(mapData.pokemons) + redrawPokemon(notifyPerfectionPkmn) + + markerCluster.redraw() + }) $('#scanned-switch').change(function () { buildSwitchChangeListener(mapData, ['scanned'], 'showScanned').bind(this)() }) @@ -3065,7 +3295,10 @@ $(function () { $('#lock-marker-switch').change(function () { Store.set('lockMarker', this.checked) - searchMarker.setDraggable(!this.checked) + + if (searchMarker) { + searchMarker.setDraggable(!this.checked) + } }) $('#search-switch').change(function () { @@ -3082,7 +3315,17 @@ $(function () { } else { Store.set('followMyLocation', this.checked) } - locationMarker.setDraggable(!this.checked) + + if (locationMarker) { + if (this.checked) { + // Follow our position programatically, so no dragging. + locationMarker.setDraggable(false) + } else { + // Go back to default non-follow. 
+ const isMarkerMovable = Store.get('isLocationMarkerMovable') + locationMarker.setDraggable(isMarkerMovable) + } + } }) $('#scan-here-switch').change(function () { diff --git a/static/sass/layout/_gym-details.scss b/static/sass/layout/_gym-details.scss index e663e0ff20..364f6b2640 100644 --- a/static/sass/layout/_gym-details.scss +++ b/static/sass/layout/_gym-details.scss @@ -342,7 +342,7 @@ &.sprite { @media screen and (max-width: 480px) { height: 48px !important; - width:: 48px !important; + width: 48px !important; } display: block; height: 96px; @@ -352,7 +352,7 @@ &.pokemon { @media screen and (max-width: 480px) { height: 30px !important; - width:: 30px !important; + width: 30px !important; } display: block; height: 48px; @@ -392,7 +392,7 @@ &.strength { @media screen and (max-width: 480px) { height: 12px !important; - width:: 12px !important; + width: 12px !important; } font-weight: 900; height: 16px; @@ -403,20 +403,55 @@ &.pokemon { font-size: 12px; + line-height: 1rem; - &.motivation { - font-weight: 900; + &.motivation.decayed:before { + content: '\f004'; + display: inline-block; + font-family: FontAwesome; + width: 24px; + -webkit-font-smoothing: antialiased; + } - &.heart { - @media screen and (max-width: 480px) { - height: 8px !important; - width: 8px !important; + &.motivation.decayed { + @media screen and (max-width: 480px) { + font-size: 9px !important; + } + font-weight: 900; + font-size: 16px; + white-space: nowrap; + &.zone { + &.good { + color: rgba(16,155,49,1); + } + &.average { + color: rgba(235,193,26,1); + } + &.bad { + color: rgba(255,25,25,1); } - height: 12px; - width: 12px; - vertical-align: middle; } } + + &.motivation.cp:before { + color: #fc6fb0; + content: '\f004'; + display: inline-block; + font-family: FontAwesome; + width: 12px; + -webkit-font-smoothing: antialiased; + } + + &.motivation.cp { + @media screen and (max-width: 480px) { + font-size: 8px !important; + } + color: #000; + font-size: 10px; + font-weight: 500; + 
text-align: inherit; + white-space: nowrap; + } } } diff --git a/static/sass/layout/_gym.scss b/static/sass/layout/_gym.scss index fafb1424bf..c3c22fc5fb 100644 --- a/static/sass/layout/_gym.scss +++ b/static/sass/layout/_gym.scss @@ -22,6 +22,21 @@ font-weight: 900; margin-bottom: 2px; text-decoration: underline; + + .team { + &.uncontested { + color: rgba(85,85,85,1); + } + &.mystic { + color: rgba(0,134,255,1); + } + &.valor { + color: rgba(255,26,26,1); + } + &.instinct { + color: rgba(255,159,25,1); + } + } } &.sprite { diff --git a/static01.zip b/static01.zip index de6a958cef..44d77df448 100644 Binary files a/static01.zip and b/static01.zip differ diff --git a/templates/bookmarklet.html b/templates/bookmarklet.html index c8ef8d5da2..0899f3b49f 100644 --- a/templates/bookmarklet.html +++ b/templates/bookmarklet.html @@ -14,5 +14,6 @@

Don't save THIS page in bookmarks!



You can drag the link directly to bookmarks menu in your browser.

Verify that the URL of the bookmark starts with javascript:.

+ diff --git a/templates/map.html b/templates/map.html index cfddd4abf3..ae8d1b5b4b 100644 --- a/templates/map.html +++ b/templates/map.html @@ -247,6 +247,29 @@

Hide Pokémon

+
+

Exclude Rarity

+ +
+ {% if show.encounter %} +
+

Pokémon Stats

+
+ + +
+
+ {% endif %} {% endif %}
@@ -379,11 +402,13 @@

Notify of Rarity

{% if show.encounter %} -
- +