[IMP] Update for win-blows compatibility, add geckodriver source

refactor_total
Brett Spaulding 2 years ago
parent 368f192107
commit e58ab73f6e

@ -1,247 +0,0 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (i.e. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

@ -1,69 +0,0 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="/home/stonesoft/Apps/getDiscography"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1="(getDiscography) ${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT="(getDiscography) "
export VIRTUAL_ENV_PROMPT
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi

@ -1,26 +0,0 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/home/stonesoft/Apps/getDiscography"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = "(getDiscography) $prompt"
setenv VIRTUAL_ENV_PROMPT "(getDiscography) "
endif
alias pydoc python -m pydoc
rehash

@ -1,69 +0,0 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end
set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV "/home/stonesoft/Apps/getDiscography"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) "(getDiscography) " (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT "(getDiscography) "
end

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from mutagen._tools.mid3cp import entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(entry_point())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from mutagen._tools.mid3iconv import entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(entry_point())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from mutagen._tools.mid3v2 import entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(entry_point())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from mutagen._tools.moggsplit import entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(entry_point())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from mutagen._tools.mutagen_inspect import entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(entry_point())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from mutagen._tools.mutagen_pony import entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(entry_point())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from charset_normalizer.cli.normalizer import cli_detect
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(cli_detect())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pysondb.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -1 +0,0 @@
python3

@ -1 +0,0 @@
/usr/bin/python3

@ -1 +0,0 @@
python3

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from tldextract.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -1,8 +0,0 @@
#!/home/stonesoft/Apps/getDiscography/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from yt_dlp import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

@ -1,5 +1,6 @@
import json
import operator as oprtr
from const import *
from pysondb import PysonDB
@ -26,7 +27,7 @@ class Model:
# class objects that can be manipulated more easily by things like update_by_id
def __init__(self, name):
self.env = PysonDB('/home/stonesoft/Apps/getDiscography/database/%s.json' % name)
self.env = PysonDB(CWD + '/database/%s.json' % name)
def _search(self, records, params):
"""

@ -0,0 +1,3 @@
[target.i686-pc-windows-gnu]
linker = "i686-w64-mingw32-gcc"
rustflags = "-C panic=abort"

File diff suppressed because it is too large

@ -0,0 +1,2 @@
Please see our contributor documentation at
https://firefox-source-docs.mozilla.org/testing/geckodriver/#for-developers.

File diff suppressed because it is too large

@ -0,0 +1,51 @@
[package]
edition = "2018"
name = "geckodriver"
version = "0.33.0"
authors = ["Mozilla"]
include = [
"/.cargo",
"/build.rs",
"/src"
]
description = "Proxy for using WebDriver clients to interact with Gecko-based browsers."
readme = "README.md"
keywords = [
"firefox",
"httpd",
"mozilla",
"w3c",
"webdriver",
]
license = "MPL-2.0"
repository = "https://hg.mozilla.org/mozilla-central/file/tip/testing/geckodriver"
[dependencies]
base64 = "0.13"
chrono = "0.4.6"
clap = { version = "~3.1", default-features = false, features = ["cargo", "std", "suggestions", "wrap_help"] }
hyper = "0.14"
lazy_static = "1.0"
log = { version = "0.4", features = ["std"] }
marionette = "0.4.0"
mozdevice = "0.5.1"
mozprofile = "0.9.1"
mozrunner = "0.15.1"
mozversion = "0.5.1"
regex = { version="1.0", default-features = false, features = ["perf", "std"] }
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0"
serde_yaml = "0.8"
tempfile = "3"
unicode-segmentation = "1.9"
url = "2.0"
uuid = { version = "1.0", features = ["v4"] }
webdriver = "0.48.0"
zip = { version = "0.6", default-features = false, features = ["deflate"] }
[dev-dependencies]
tempfile = "3"
[[bin]]
name = "geckodriver"

@ -0,0 +1,31 @@
## System
* Version: <!-- geckodriver version -->
* Platform: <!-- e.g. Linux/macOS/Windows + version -->
* Firefox: <!-- from the about dialogue -->
* Selenium: <!-- client + version -->
## Testcase
<!--
Please provide a minimal HTML document which permits the problem
to be reproduced.
-->
## Stacktrace
<!--
Error and stacktrace produced by client.
-->
## Trace-level log
<!--
See https://searchfox.org/mozilla-central/source/testing/geckodriver/doc/TraceLogs.md
for how to produce a trace-level log.
For trace logs with more than 20 lines please add its contents as attachment.
-->

@ -0,0 +1,385 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at <http://mozilla.org/MPL/2.0/>.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

@ -0,0 +1,85 @@
geckodriver
===========
Proxy for using W3C [WebDriver] compatible clients to interact with
Gecko-based browsers.
This program provides the HTTP API described by the [WebDriver
protocol] to communicate with Gecko browsers, such as Firefox. It
translates calls into the [Marionette remote protocol] by acting
as a proxy between the local- and remote ends.
[WebDriver protocol]: https://w3c.github.io/webdriver/#protocol
[Marionette remote protocol]: https://firefox-source-docs.mozilla.org/testing/marionette/
[WebDriver]: https://developer.mozilla.org/en-US/docs/Web/WebDriver
Downloads
---------
* [Releases](https://github.com/mozilla/geckodriver/releases/latest)
* [Change log](https://searchfox.org/mozilla-central/source/testing/geckodriver/CHANGES.md)
Documentation
-------------
* [WebDriver] (work in progress)
* [Commands](https://developer.mozilla.org/en-US/docs/Web/WebDriver/Commands)
* [Errors](https://developer.mozilla.org/en-US/docs/Web/WebDriver/Errors)
* [Types](https://developer.mozilla.org/en-US/docs/Web/WebDriver/Types)
* [Cross browser testing](https://developer.mozilla.org/en-US/docs/Learn/Tools_and_testing/Cross_browser_testing)
* [Selenium](https://seleniumhq.github.io/docs/) (work in progress)
* [C# API](https://seleniumhq.github.io/selenium/docs/api/dotnet/)
* [JavaScript API](https://seleniumhq.github.io/selenium/docs/api/javascript/)
* [Java API](https://seleniumhq.github.io/selenium/docs/api/java/)
* [Perl API](https://metacpan.org/pod/Selenium::Remote::Driver)
* [Python API](https://seleniumhq.github.io/selenium/docs/api/py/)
* [Ruby API](https://seleniumhq.github.io/selenium/docs/api/rb/)
* [geckodriver usage](https://firefox-source-docs.mozilla.org/testing/geckodriver/Usage.html)
* [Supported platforms](https://firefox-source-docs.mozilla.org/testing/geckodriver/Support.html)
* [Firefox capabilities](https://firefox-source-docs.mozilla.org/testing/geckodriver/Capabilities.html)
* [Capabilities example](https://firefox-source-docs.mozilla.org/testing/geckodriver/Capabilities.html#capabilities-example)
* [Enabling trace logs](https://firefox-source-docs.mozilla.org/testing/geckodriver/TraceLogs.html)
* [Analyzing crash data from Firefox](https://firefox-source-docs.mozilla.org/testing/geckodriver/CrashReports.html)
* [Contributing](https://firefox-source-docs.mozilla.org/testing/geckodriver/#for-developers)
* [Building](https://firefox-source-docs.mozilla.org/testing/geckodriver/Building.html)
* [Testing](https://firefox-source-docs.mozilla.org/testing/geckodriver/Testing.html)
* [Releasing](https://firefox-source-docs.mozilla.org/testing/geckodriver/Releasing.html)
* [Self-serving an ARM build](https://firefox-source-docs.mozilla.org/testing/geckodriver/ARM.html)
Source code
-----------
geckodriver is made available under the [Mozilla Public License].
Its source code can be found in [mozilla-central] under testing/geckodriver.
This GitHub repository is only used for issue tracking and making releases.
[source code]: https://hg.mozilla.org/mozilla-unified/file/tip/testing/geckodriver
[Mozilla Public License]: https://www.mozilla.org/en-US/MPL/2.0/
[mozilla-central]: https://hg.mozilla.org/mozilla-central/file/tip/testing/geckodriver
Custom release builds
---------------------
If a binary is not available for your platform, it's possible to create a custom
build using the [Rust] toolchain. To do this, check out the release tag for the
version of interest and run `cargo build`. Alternatively the latest version may
be built and installed from `crates.io` using `cargo install geckodriver`.
[Rust]: https://rustup.rs/
Contact
-------
The mailing list for geckodriver discussion is
https://groups.google.com/a/mozilla.org/g/dev-webdriver.
There is also an Element channel to talk about using and developing
geckodriver on [#webdriver:mozilla.org](https://chat.mozilla.org/#/room/#webdriver:mozilla.org).

@ -0,0 +1,136 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// Writes build information to ${OUT_DIR}/build-info.rs which is included in
// the program during compilation:
//
// ```no_run
// const COMMIT_HASH: Option<&'static str> = Some("c31a366");
// const COMMIT_DATE: Option<&'static str> = Some("1988-05-10");
// ```
//
// The values are `None` if running hg failed, e.g. if it is not installed or
// if we are not in an hg repo.
use std::env;
use std::ffi::OsStr;
use std::fs::File;
use std::io;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::Command;
fn main() -> io::Result<()> {
let cur_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let build_info = get_build_info(&cur_dir);
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
let mut fh = File::create(out_dir.join("build-info.rs"))?;
writeln!(
fh,
"const COMMIT_HASH: Option<&'static str> = {:?};",
build_info.hash()
)?;
writeln!(
fh,
"const COMMIT_DATE: Option<&'static str> = {:?};",
build_info.date()
)?;
Ok(())
}
fn get_build_info(dir: &Path) -> Box<dyn BuildInfo> {
if Path::exists(&dir.join(".hg")) {
Box::new(Hg {})
} else if Path::exists(&dir.join(".git")) {
Box::new(Git {})
} else if let Some(parent) = dir.parent() {
get_build_info(parent)
} else {
eprintln!("unable to detect vcs");
Box::new(Noop {})
}
}
trait BuildInfo {
fn hash(&self) -> Option<String>;
fn date(&self) -> Option<String>;
}
struct Hg;
impl Hg {
fn exec<I, S>(&self, args: I) -> Option<String>
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
Command::new("hg")
.env("HGPLAIN", "1")
.args(args)
.output()
.ok()
.and_then(|r| String::from_utf8(r.stdout).ok())
.map(|s| s.trim_end().into())
}
}
impl BuildInfo for Hg {
fn hash(&self) -> Option<String> {
self.exec(["log", "-r.", "-T{node|short}"])
}
fn date(&self) -> Option<String> {
self.exec(["log", "-r.", "-T{date|isodate}"])
}
}
struct Git;
impl Git {
fn exec<I, S>(&self, args: I) -> Option<String>
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
Command::new("git")
.env("GIT_CONFIG_NOSYSTEM", "1")
.args(args)
.output()
.ok()
.and_then(|r| String::from_utf8(r.stdout).ok())
.map(|s| s.trim_end().into())
}
fn to_hg_sha(&self, git_sha: String) -> Option<String> {
self.exec(["cinnabar", "git2hg", &git_sha])
}
}
impl BuildInfo for Git {
fn hash(&self) -> Option<String> {
self.exec(["rev-parse", "HEAD"])
.and_then(|sha| self.to_hg_sha(sha))
.map(|mut s| {
s.truncate(12);
s
})
}
fn date(&self) -> Option<String> {
self.exec(["log", "-1", "--date=short", "--pretty=format:%cd"])
}
}
struct Noop;
impl BuildInfo for Noop {
fn hash(&self) -> Option<String> {
None
}
fn date(&self) -> Option<String> {
None
}
}

Binary file not shown.

@ -0,0 +1,50 @@
# Self-serving an ARM build
Mozilla announced the intent to deprecate ARMv7 HF builds of
geckodriver in September 2018. This does not mean you can no longer
use geckodriver on ARM systems, and this document explains how you
can self-service a build for ARMv7 HF.
Assuming you have already checked out [central], the steps to
cross-compile ARMv7 from a Linux host system are as follows:
1. If you don't have Rust installed:
```shell
% curl https://sh.rustup.rs -sSf | sh
```
2. Install cross-compiler toolchain:
```shell
% apt install gcc-arm-linux-gnueabihf libc6-armhf-cross libc6-dev-armhf-cross
```
3. Create a new shell, or to reuse the existing shell:
```shell
% source $HOME/.cargo/env
```
4. Install rustc target toolchain:
```shell
% rustup target install armv7-unknown-linux-gnueabihf
```
5. Put this in [testing/geckodriver/.cargo/config]:
```toml
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"
```
6. Build geckodriver from testing/geckodriver:
```shell
% cd testing/geckodriver
% cargo build --release --target armv7-unknown-linux-gnueabihf
```
[central]: https://hg.mozilla.org/mozilla-central/
[testing/geckodriver/.cargo/config]: https://searchfox.org/mozilla-central/source/testing/geckodriver/.cargo/config

@ -0,0 +1,45 @@
# Reporting bugs
When opening a new issue or commenting on existing issues, please
make sure discussions are related to concrete technical issues
with geckodriver or Marionette. Questions or discussions are more
appropriate for the [mailing list].
For issue reports to be actionable, it must be clear exactly
what the observed and expected behaviours are, and how to set up
the state required to observe the erroneous behaviour. The most
useful thing to provide is a minimal HTML document which permits
the problem to be reproduced along with a [trace-level log] from
geckodriver showing the exact wire protocol calls made.
Because of the wide variety and different characteristics of clients
used with geckodriver, their stacktraces, logs, and code examples are
typically not very useful as they distract from the actual underlying
cause. **For this reason, we cannot overstate the importance of
always providing the [trace-level log] from geckodriver.** Bugs
relating to a specific client should be filed with that project.
We welcome you to file issues in the [GitHub issue tracker] once you are
confident it has not already been reported. The [ISSUE_TEMPLATE.md]
contains a helpful checklist for things we will want to know about
the affected system, reproduction steps, and logs.
geckodriver development follows a rolling release model as
we don't release patches for older versions. It is therefore
useful to use the tip-of-tree geckodriver binary, or failing this,
the latest release when verifying the problem. geckodriver is only
compatible with the current release channel versions of Firefox, and
it consequently does not help to report bugs that affect outdated
and unsupported Firefoxen. Please always try to verify the issue
in the latest Firefox Nightly before you file your bug.
Once we are satisfied the issue raised is of sufficiently actionable
character, we will continue with triaging it and file a bug where it
is appropriate. Bugs specific to geckodriver will be filed in the
[`Testing :: geckodriver`] component in Bugzilla.
[mailing list]: index.rst/#communication
[trace-level log]: TraceLogs.md
[GitHub issue tracker]: https://github.com/mozilla/geckodriver/issues
[ISSUE_TEMPLATE.md]: https://raw.githubusercontent.com/mozilla/geckodriver/master/ISSUE_TEMPLATE.md
[`Testing :: geckodriver`]: https://bugzilla.mozilla.org/buglist.cgi?component=geckodriver

@ -0,0 +1,46 @@
# Building geckodriver
geckodriver is written in [Rust], a systems programming language
from Mozilla. Crucially, it relies on the [webdriver crate] to
provide the HTTPD and do most of the heavy lifting of marshalling
the WebDriver protocol. geckodriver translates WebDriver [commands],
[responses], and [errors] to the [Marionette protocol], and acts
as a proxy between [WebDriver] and [Marionette].
To build geckodriver:
```shell
% ./mach build testing/geckodriver
```
If you use artifact builds you may build geckodriver using cargo,
since mach in this case does not have a compile environment:
```shell
% cd testing/geckodriver
% cargo build
Compiling geckodriver v0.21.0 (file:///code/gecko/testing/geckodriver)
Finished dev [optimized + debuginfo] target(s) in 7.83s
```
Because all Rust code in central shares the same cargo workspace,
the binary will be put in the `$(topsrcdir)/target` directory.
You can run your freshly built geckodriver this way:
```shell
% ./mach geckodriver -- --other --flags
```
See [Testing](Testing.md) for how to run tests.
[Rust]: https://www.rust-lang.org/
[webdriver crate]: https://crates.io/crates/webdriver
[commands]: https://docs.rs/webdriver/newest/webdriver/command/
[responses]: https://docs.rs/webdriver/newest/webdriver/response/
[errors]: https://docs.rs/webdriver/newest/webdriver/error/enum.ErrorStatus.html
[Marionette protocol]: /testing/marionette/Protocol.md
[WebDriver]: https://w3c.github.io/webdriver/
[Marionette]: /testing/marionette/index.rst

@ -0,0 +1,98 @@
# Firefox capabilities
geckodriver has a few capabilities that are specific to Firefox.
Most of these [are documented on MDN](https://developer.mozilla.org/en-US/docs/Web/WebDriver/Capabilities/firefoxOptions).
We additionally have some capabilities that largely are implementation
concerns that normal users should not care about:
## `moz:debuggerAddress`
A boolean value to indicate if Firefox has to be started with the
[Remote Protocol] enabled, which is a low-level debugging interface that
implements a subset of the [Chrome DevTools Protocol] (CDP).
When enabled the returned `moz:debuggerAddress` capability of the `New Session`
command is the `host:port` combination of a server that supports the following
HTTP endpoints:
### GET /json/version
The browser version metadata:
```json
{
"Browser": "Firefox/84.0a1",
"Protocol-Version": "1.0",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:84.0) Gecko/20100101 Firefox/84.0",
"V8-Version": "1.0",
"WebKit-Version": "1.0",
"webSocketDebuggerUrl": "ws://localhost:9222/devtools/browser/fe507083-2960-a442-bbd7-7dfe1f111c05"
}
```
### GET /json/list
A list of all available websocket targets:
```json
[ {
"description": "",
"devtoolsFrontendUrl": null,
"faviconUrl": "",
"id": "ecbf9028-676a-1b40-8596-a5edc0e2875b",
"type": "page",
"url": "https://www.mozilla.org/en-US/",
"browsingContextId": 29,
"webSocketDebuggerUrl": "ws://localhost:9222/devtools/page/ecbf9028-676a-1b40-8596-a5edc0e2875b"
} ]
```
The contained `webSocketDebuggerUrl` entries can be used to connect to the
websocket and interact with the browser by using the CDP protocol.
[Remote Protocol]: /remote/index.rst
[Chrome DevTools Protocol]: https://chromedevtools.github.io/devtools-protocol/
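As a rough illustration of how these endpoints can be consumed, the sketch below queries `/json/version` and `/json/list` with only the Python standard library, assuming geckodriver returned a `moz:debuggerAddress` of `localhost:9222` as in the sample responses above:

```python
import json
from urllib.request import urlopen

# Assumed host:port, taken from the moz:debuggerAddress capability
# returned by the New Session command (see the sample responses above).
debugger_address = "localhost:9222"

def get_json(path):
    # Plain HTTP GET against the debugger server, decoding the JSON body.
    with urlopen(f"http://{debugger_address}{path}") as resp:
        return json.load(resp)

version = get_json("/json/version")
print(version["Browser"], version["webSocketDebuggerUrl"])

for target in get_json("/json/list"):
    # Each entry carries a webSocketDebuggerUrl usable for CDP connections.
    print(target["type"], target["url"], target["webSocketDebuggerUrl"])
```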
## `moz:useNonSpecCompliantPointerOrigin`
A boolean value to indicate how the pointer origin for an action
command will be calculated.
With Firefox 59 the calculation will be based on the requirements
by the [WebDriver] specification. This means that the pointer origin
is no longer computed based on the top and left position of the
referenced element, but on the in-view center point.
To temporarily disable the WebDriver conformant behavior use `false`
as value for this capability.
Please note that this capability exists only temporarily, and that
it will be removed once all Selenium bindings can handle the new
behavior.
## `moz:webdriverClick`
A boolean value to indicate which kind of interactability checks
to run when performing a click or sending keys to an element. For
Firefoxen prior to version 58.0, some legacy code imported from
an older version of FirefoxDriver was in use.
With Firefox 58 the interactability checks as required by the
[WebDriver] specification are enabled by default. This means
geckodriver will additionally check if an element is obscured by
another when clicking, and if an element is focusable for sending
keys.
Because of this change in behaviour, we are aware that some extra
errors could be returned. In most cases the test in question might
have to be updated so it conforms with the new checks. But if the
problem is located in geckodriver, then please raise an issue in
the [issue tracker].
To temporarily disable the WebDriver conformant checks use `false`
as value for this capability.
Please note that this capability exists only temporarily, and that
it will be removed once the interactability checks have been
stabilized.
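A minimal sketch of passing one of these capabilities from the Python Selenium bindings, assuming a Selenium 4 style `Options` object whose `set_capability` adds a top-level entry to the new-session payload:

```python
from selenium import webdriver
from selenium.webdriver.firefox.options import Options

opts = Options()
# Temporarily fall back to the legacy, non spec-conformant click checks.
# Only honoured while geckodriver still supports this capability.
opts.set_capability("moz:webdriverClick", False)

driver = webdriver.Firefox(options=opts)
```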

@ -0,0 +1,67 @@
# Analyzing crash data of Firefox
It's not uncommon that, under some special platform configurations and while
running automated tests via Selenium and geckodriver, Firefox crashes. In
those cases it is very helpful to retrieve the generated crash data, aka
minidump files, and report these to us.
## Retrieve the crash data
Because geckodriver creates a temporary user profile for Firefox, it also
automatically removes all of its folders once the tests have finished. That
also means that if Firefox crashed, the created minidump files are lost. To
prevent that, a custom profile has to be used instead. The following code
shows an example using the Python Selenium bindings on macOS:
```python
import tempfile
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
# Custom profile folder to keep the minidump files
profile = tempfile.mkdtemp(".selenium")
print("*** Using profile: {}".format(profile))
# Use the above folder as custom profile
opts = Options()
opts.add_argument("-profile")
opts.add_argument(profile)
opts.binary = "/Applications/Firefox.app/Contents/MacOS/firefox"
driver = webdriver.Firefox(
options=opts,
# hard-code the Marionette port so geckodriver can connect
service_args=["--marionette-port", "2828"]
)
# Your test code which crashes Firefox
```
Executing the test with Selenium now, which triggers the crash of Firefox,
will leave all the files from the user profile in the above path.
To retrieve the minidump files, navigate to that folder and look for a sub-folder
named `minidumps`. It should contain at least one series of files: one file with
the `.dmp` extension and another with `.extra`. Both of those files are needed.
If more crash files are present, grab them all.
Attach the files, ideally archived as a zip file, to the [geckodriver issue]
you create on GitHub.
[geckodriver issue]: https://github.com/mozilla/geckodriver/issues/new
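If you prefer to collect the crash files programmatically before attaching them, a small sketch along these lines should do, assuming `profile` is the custom profile directory created in the example above:

```python
import glob
import os
import zipfile

# Assumed path of the custom profile created by tempfile.mkdtemp(".selenium") above.
profile = "/tmp/example.selenium"

# Collect every .dmp and .extra file from the profile's minidumps folder.
crash_files = glob.glob(os.path.join(profile, "minidumps", "*.dmp"))
crash_files += glob.glob(os.path.join(profile, "minidumps", "*.extra"))

with zipfile.ZipFile("minidumps.zip", "w") as archive:
    for path in crash_files:
        archive.write(path, arcname=os.path.basename(path))
print(f"Archived {len(crash_files)} crash files to minidumps.zip")
```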
## Getting details of the crash
More advanced users can upload the generated minidump files themselves and
receive detailed information about the crash. To do so, find the [crash reporter]
folder and copy all the generated minidump files into the `pending` sub-directory.
Make sure that both the `.dmp` and `.extra` files are present.
Once done you can also [view the crash reports].
If you submitted a crash please do not forget to also add the link of the
crash report to the geckodriver issue.
[crash reporter]: https://support.mozilla.org/kb/mozillacrashreporter#w_viewing-reports-outside-of-firefox
[view the crash reports]: https://support.mozilla.org/kb/mozillacrashreporter#w_viewing-crash-reports

@ -0,0 +1,218 @@
<!-- markdownlint-disable MD033 -->
# Flags
## <code>--allow-hosts <var>ALLOW_HOSTS</var>...</code>
Values of the `Host` header to allow for incoming requests.
By default the value of <var>HOST</var> is allowed. If `--allow-hosts`
is provided, exactly the given values will be permitted. For example
`--allow-hosts geckodriver.test webdriver.local` will allow requests
with `Host` set to `geckodriver.test` or `webdriver.local`.
Requests with `Host` set to an IP address are always allowed.
## <code>--allow-origins <var>ALLOW_ORIGINS</var>...</code>
Values of the `Origin` header to allow for incoming requests.
`Origin` is set by web browsers for all `POST` requests, and most
other cross-origin requests. By default any request with an `Origin`
header is rejected to protect against malicious websites trying to
access geckodriver running on the local machine.
If `--allow-origins` is provided, web services running on the given
origin will be able to make requests to geckodriver. For example
`--allow-origins https://webdriver.test:8080` will allow a web-based
service on the origin with scheme `https`, hostname `webdriver.test`,
and port `8080` to access the geckodriver instance.
## <code>&#x2D;&#x2D;android-storage <var>ANDROID_STORAGE</var></code>
**Deprecation warning**: This argument is deprecated and planned to be removed
with the 0.31.0 release of geckodriver. As such it shouldn't be used with version
0.30.0 or later anymore. By default the automatic detection will now use the
external storage location, which is always readable and writeable.
Selects the test data location on the Android device, e.g. the Firefox profile.
By default `auto` is used.
<style type="text/css">
table { width: 100%; margin-bottom: 2em; }
table, th, td { border: solid gray 1px; }
td, th { padding: 10px; text-align: left; vertical-align: middle; }
td:nth-child(1), th:nth-child(1) { width: 10em; text-align: center; }
</style>
<table>
<thead>
<tr>
<th>Value
<th>Description
</tr>
</thead>
<tr>
<td>auto
<td>Best suitable location based on whether the device is rooted.<br/>
If the device is rooted <code>internal</code> is used, otherwise <code>app</code>.
<tr>
<td>app
<td><p>Location: <code>/data/data/%androidPackage%/test_root</code></p>
Based on the <code>androidPackage</code> capability that is passed as part of
<code>moz:firefoxOptions</code> when creating a new session. Commands that
change data in the app's directory are executed using run-as. This requires
that the installed app is debuggable.
<tr>
<td>internal
<td><p>Location: <code>/data/local/tmp/test_root</code></p>
The device must be rooted since when the app runs, files that are created
in the profile, which is owned by the app user, cannot be changed by the
shell user. Commands will be executed via <code>su</code>.
<tr>
<td>sdcard
<td><p>Location: <code>$EXTERNAL_STORAGE/Android/data/%androidPackage%/files/test_root</code></p>
This location is supported by all versions of Android whether the device
is rooted or not.
</table>
## <code>-b <var>BINARY</var></code> / <code>&#x2D;&#x2D;binary <var>BINARY</var></code>
Path to the Firefox binary to use. By default geckodriver tries to
find and use the system installation of Firefox, but that behaviour
can be changed by using this option. Note that the `binary`
capability of the `moz:firefoxOptions` object that is passed when
[creating a new session] will override this option.
On Linux systems it will use the first _firefox_ binary found
by searching the `PATH` environment variable, which is roughly
equivalent to calling [whereis(1)] and extracting the second column:
```shell
% whereis firefox
firefox: /usr/bin/firefox /usr/local/firefox
```
On macOS, the binary is found by looking for the first _firefox-bin_
binary in the same fashion as on Linux systems. This means it is
possible to also use `PATH` to control where geckodriver should
find Firefox on macOS. If that fails, it falls back to _/Applications/Firefox.app_.
On Windows systems, geckodriver looks for the system Firefox by
scanning the Windows registry.
[creating a new session]: https://w3c.github.io/webdriver/#new-session
[whereis(1)]: http://www.manpagez.com/man/1/whereis/
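As a minimal sketch, assuming Firefox was unpacked to the hypothetical location below:

```shell
% geckodriver --binary /usr/local/firefox/firefox
```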
## <code>&#x2D;&#x2D;connect-existing</code>
Connect geckodriver to an existing Firefox instance. This means
geckodriver will abstain from the default of starting a new Firefox
session.
The existing Firefox instance must have [Marionette] enabled.
To enable the remote protocol in Firefox, you can pass the
`-marionette` flag. Unless the `marionette.port` preference
has been user-set, Marionette will listen on port 2828. So when
using `--connect-existing` it is likely you will also have to use
[`--marionette-port`] to set the correct port.

[`--marionette-port`]: #marionette-port
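A minimal sketch of the typical combination, assuming Firefox is started with Marionette listening on its default port 2828:

```shell
% firefox -marionette &
% geckodriver --connect-existing --marionette-port 2828
```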
## <code>&#x2D;&#x2D;host <var>HOST</var></code>
Host to use for the WebDriver server. Defaults to 127.0.0.1.
## <code>&#x2D;&#x2D;jsdebugger</code>
Attach [browser toolbox] debugger when Firefox starts. This is
useful for debugging [Marionette] internals.
To be prompted at the start of the test run or between tests,
you can set the `marionette.debugging.clicktostart` preference to
`true`.
For reference, below is the list of preferences that enable the
chrome debugger. These are all set implicitly when the
argument is passed to geckodriver.
* `devtools.browsertoolbox.panel` → `jsdebugger`
Selects the Debugger panel by default.
* `devtools.chrome.enabled` → true
Enables debugging of chrome code.
* `devtools.debugger.prompt-connection` → false
Controls the remote connection prompt. Note that this will
automatically expose your Firefox instance to localhost.
* `devtools.debugger.remote-enabled` → true
Allows a remote debugger to connect, which is necessary for
debugging chrome code.
[browser toolbox]: https://developer.mozilla.org/en-US/docs/Tools/Browser_Toolbox
## <code>&#x2D;&#x2D;log <var>LEVEL</var></code>
Set the Gecko and geckodriver log level. Possible values are `fatal`,
`error`, `warn`, `info`, `config`, `debug`, and `trace`.
## <code>&#x2D;&#x2D;log-no-truncate</code>
Disables truncation of long log lines.
## <code>&#x2D;&#x2D;marionette-host <var>HOST</var></code>
Selects the host for geckodriver's connection to the [Marionette]
remote protocol. Defaults to 127.0.0.1.
## <code>&#x2D;&#x2D;marionette-port <var>PORT</var></code>
Selects the port for geckodriver's connection to the [Marionette]
remote protocol.
In the default mode where geckodriver starts and manages the Firefox
process, it will pick a free port assigned by the system and set the
`marionette.port` preference in the profile.
When [`--connect-existing`] is used and the Firefox process is not
under geckodriver's control, it will simply connect to <var>PORT</var>.

[`--connect-existing`]: #connect-existing
## <code>-p <var>PORT</var></code> / <code>&#x2D;&#x2D;port <var>PORT</var></code>
Port to use for the WebDriver server. Defaults to 4444.
A helpful trick is that it is possible to bind to 0 to get the
system to atomically assign a free port.
## <code>&#x2D;&#x2D;profile-root <var>PROFILE_ROOT</var></code>
Path to the directory to use when creating temporary profiles. By
default this is the system temporary directory. Both geckodriver and
Firefox must have read-write access to this path.
This setting can be useful when Firefox is sandboxed from the host
filesystem such that it doesn't share the same system temporary
directory as geckodriver (e.g. when running Firefox inside a container
or packaged as a snap).
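A minimal sketch for such a setup; the directory below is only an example and must be readable and writable by both processes:

```shell
% mkdir -p $HOME/geckodriver-profiles
% geckodriver --profile-root $HOME/geckodriver-profiles
```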
## <code>-v<var>[v]</var></code>
Increases the logging verbosity to debug level when passing
a single `-v`, or to trace level if `-vv` is passed. This is
analogous to passing `--log debug` and `--log trace`, respectively.
## <code>&#x2D;&#x2D;websocket-port <var>PORT</var></code>
Port to use to connect to WebDriver BiDi. Defaults to 9222.
A helpful trick is that it is possible to bind to 0 to get the
system to atomically assign a free port.
[Marionette]: /testing/marionette/index.rst

@ -0,0 +1,44 @@
# MacOS notarization
With the introduction of macOS 10.15 “Catalina” Apple introduced
[new notarization requirements] that all software must be signed
and notarized centrally.
Whilst the geckodriver binary is technically both signed and notarized, the
actual validation can only be performed by macOS if the machine that starts
the geckodriver binary for the very first time is online. Offline validation
would require shipping geckodriver as a DMG/PKG. You can track the relevant
progress in [bug 1783943].
Note: geckodriver releases between 0.26.0 and 0.31.0 don't have the
notarization applied and always require the manual steps below to
bypass the notarization requirement of the binary during the very first start.
[new notarization requirements]: https://developer.apple.com/news/?id=04102019a
[bug 1783943]: https://bugzilla.mozilla.org/show_bug.cgi?id=1783943
## Offline mode
There are some mitigating circumstances:
* Verification problems only occur when other notarized programs,
such as a web browser, download the software from the internet.
* Arbitrary software downloaded through other means, such as
curl(1), is _not_ affected by this change.
In other words, if your method for fetching geckodriver on macOS
is through the GitHub web UI using a web browser, the program will
not be able to run unless you manually disable the quarantine check
(explained below). If downloading geckodriver via other means
than a macOS notarized program, you should not be affected.
To bypass the notarization requirement on macOS if you have downloaded
the geckodriver .tar.gz via a web browser, you can run the following
command in a terminal:
```shell
% xattr -r -d com.apple.quarantine geckodriver
```
A problem with notarization will manifest itself through a security
dialogue appearing, explaining that the source of the program is
not trusted.

@ -0,0 +1,31 @@
# Submitting patches
You can submit patches by using [Phabricator]. Walk through its documentation
on how to set it up and upload patches for review. Don't worry about which
person to select for reviewing your code; a reviewer will be assigned automatically.
Please also make sure to follow the [commit creation guidelines].
Once you have contributed a couple of patches, we are happy to sponsor you in
[becoming a Mozilla committer]. When you have been granted commit access
level 1, you will have permission to use the [Firefox CI] to trigger your own
“try runs” to test your changes. You can use the following [try preset] to run
the most relevant tests:
```shell
% ./mach try --preset geckodriver
```
This preset will schedule geckodriver-related tests on various platforms. You can
reduce the number of tasks by filtering on platforms (e.g. linux) or build type
(e.g. opt):
```shell
% ./mach try --preset geckodriver -xq "'linux 'opt"
```
[Phabricator]: https://moz-conduit.readthedocs.io/en/latest/phabricator-user.html
[commit creation guidelines]: https://mozilla-version-control-tools.readthedocs.io/en/latest/devguide/contributing.html?highlight=phabricator#submitting-patches-for-review
[becoming a Mozilla committer]: https://www.mozilla.org/en-US/about/governance/policies/commit/
[Firefox CI]: https://treeherder.mozilla.org/
[try preset]: https://firefox-source-docs.mozilla.org/tools/try/presets.html

@ -0,0 +1,103 @@
# Profiles
geckodriver uses [profiles] to instrument Firefox behaviour. The
user will usually rely on geckodriver to generate a temporary,
throwaway profile. These profiles are deleted when the WebDriver
session expires.
In cases where the user needs to use custom, prepared profiles,
geckodriver will make modifications to the profile that ensure
correct behaviour. See [_Automation preferences_] below on the
precedence of user-defined preferences in this case.
Custom profiles can be provided two different ways:
1. by appending `--profile /some/location` to the [`args` capability],
which will instruct geckodriver to use the profile _in-place_;
2. or by setting the [`profile` capability] to a Base64-encoded
ZIP of the profile directory.
Note that geckodriver has a [known bug concerning `--profile`] that
prevents the randomised Marionette port from being passed to
geckodriver. To circumvent this issue, make sure you specify the
port manually using `--marionette-port <port>`.
The second way is compatible with shipping Firefox profiles across
a network, when for example the geckodriver instance is running on
a remote system. This is the case when using Selenium's `RemoteWebDriver`
concept, where the WebDriver client and the server are running on
two distinct systems.
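As a rough sketch of how such a payload could be prepared by hand (most WebDriver clients do this for you; paths are placeholders):

```shell
% cd /path/to/prepared-profile
% zip -r /tmp/profile.zip .
% base64 /tmp/profile.zip > /tmp/profile.b64
```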
[profiles]: https://support.mozilla.org/en-US/kb/profiles-where-firefox-stores-user-data
[_Automation preferences_]: #automation-preferences
[`args` capability]: https://developer.mozilla.org/en-US/docs/Web/WebDriver/Capabilities/firefoxOptions#args_array_of_strings
[`profile` capability]: https://developer.mozilla.org/en-US/docs/Web/WebDriver/Capabilities/firefoxOptions#profile_string
[known bug concerning `--profile`]: https://github.com/mozilla/geckodriver/issues/1058
## Default locations for temporary profiles
When a custom user profile is not provided with the `--profile`
command-line argument geckodriver generates a temporary, throwaway
profile. This is written to the default system temporary folder
and subsequently removed when the WebDriver session expires.
The default location for temporary profiles depends on the system.
On Unix systems it uses /tmp, and on Windows it uses the Windows
directory.
The default location can be overridden. On Unix you set the `TMPDIR`
environment variable. On Windows, the following environment variables
are respected, in order:
1. `TMP`
2. `TEMP`
3. `USERPROFILE`
It is not necessary to change the temporary directory system-wide.
All you have to do is make sure it gets set for the environment of
the geckodriver process:
```shell
TMPDIR=/some/location ./geckodriver
```
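A hedged sketch of the Windows counterpart in a console session; the path is only an example and must already exist:

```shell
set TMP=C:\Temp\geckodriver-profiles
geckodriver.exe
```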
## Automation preferences
As indicated in the introduction, geckodriver configures Firefox
so it is well-behaved in automation environments. It uses a
combination of preferences written to the profile prior to launching
Firefox (1), and a set of recommended preferences set on startup (2).
These can be perused here:
1. [testing/geckodriver/src/prefs.rs](https://searchfox.org/mozilla-central/source/testing/geckodriver/src/prefs.rs)
2. [remote/components/marionette.js](https://searchfox.org/mozilla-central/source/remote/components/marionette.js)
As mentioned, these are _recommended_ preferences, and any user-defined
preferences in the [user.js file] or as part of the [`prefs` capability]
take precedence. This means for example that the user can tweak
`browser.startup.page` to override the recommended preference for
starting the browser with a blank page.
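As a hedged sketch of what such an override could look like at the wire level, assuming geckodriver is listening on its default port 4444 (setting `browser.startup.page` to `1` requests the home page instead of a blank page):

```shell
% curl -H 'Content-Type: application/json' \
    -d '{"capabilities": {"alwaysMatch": {"moz:firefoxOptions": {"prefs": {"browser.startup.page": 1}}}}}' \
    http://localhost:4444/session
```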
The recommended preferences set at runtime (see 2 above) may also
be disabled entirely by setting the `remote.prefs.recommended` preference to
`false`, starting with Firefox 91. For older versions of Firefox, the preference to use was
`marionette.prefs.recommended`.
This may however cause geckodriver to not behave correctly according
to the WebDriver standard, so it should be used with caution.
Users should take note that the `marionette.port` preference is
special, and will always be overridden when using geckodriver unless
the `--marionette-port <port>` flag is used specifically to instruct
the Marionette server in Firefox which port to use.
[user.js file]: http://kb.mozillazine.org/User.js_file
[`prefs` capability]: https://developer.mozilla.org/en-US/docs/Web/WebDriver/Capabilities/firefoxOptions#prefs_preferences_object
## Temporary profiles not being removed
It is a known bug that geckodriver in some instances fails to remove
the temporary profile, particularly when the session is not explicitly
deleted or the process gets interrupted. See [geckodriver issue
299] for more information.
[geckodriver issue 299]: https://github.com/mozilla/geckodriver/issues/299

@ -0,0 +1,292 @@
# Releasing geckodriver
Releasing geckodriver is not as easy as it once used to be when the
project's canonical home was on GitHub. Today geckodriver is hosted
in [mozilla-central], and whilst we do want to make future releases
from [Mozilla's CI infrastructure], we are currently in between two
worlds: development happens in m-c, but releases continue to be made
from GitHub.
In any case, the steps to release geckodriver are as follows:
[mozilla-central]: https://hg.mozilla.org/mozilla-central/
[Mozilla's CI infrastructure]: https://treeherder.mozilla.org/
## Update in-tree dependency crates
geckodriver depends on a number of Rust crates that also live in
central by using relative paths. Here is an excerpt from its `Cargo.toml`:
```toml
[dependencies]
marionette = { path = "./marionette" }
mozdevice = { path = "../mozbase/rust/mozdevice" }
mozprofile = { path = "../mozbase/rust/mozprofile" }
mozrunner = { path = "../mozbase/rust/mozrunner" }
mozversion = { path = "../mozbase/rust/mozversion" }
webdriver = { path = "../webdriver" }
```
Because we need to export the geckodriver source code to the old
GitHub repository when we release, we first need to publish these
crates in the specified order if they have had any changes in the
interim since the last release. If they have received no changes,
you can skip them:
- `testing/mozbase/rust/mozdevice`
- `testing/mozbase/rust/mozprofile`
- `testing/mozbase/rust/mozrunner`
- `testing/mozbase/rust/mozversion`
- `testing/webdriver`
- `testing/geckodriver/marionette`
For each crate:
1. Change into the crate's folder.
2. Bump the version number in `Cargo.toml` based on [semantic versioning rules],
and also update the version dependency for other in-tree crates using the
currently modified crate. Note that running `cargo update` will fail if you
missed updating a crate's dependency.
3. Use the [cargo-semver-checks] command to validate the version change:
```shell
% cargo semver-checks check-release
```
4. Update the crate:
```shell
% cargo update -p <crate name>
```
5. We also publish audit information for the crates based on Mozilla's
[audit criteria]. Because we use [wildcard audit entries] make sure that the
latest day of publication is still within the `end` date. The related entry
of the crate can be found at the top of [audits.toml]. If that date has passed,
then update its value to at most one year in the future.
6. Commit the changes for the modified [Cargo.toml] files, [Cargo.lock] and
[audits.toml].
```shell
% git add Cargo.toml Cargo.lock audits.toml testing
% git commit -m "Bug XYZ - [rust-<name>] Release version <version>"
```
[semantic versioning rules]: https://semver.org/
[cargo-semver-checks]: https://crates.io/crates/cargo-semver-checks
[audit criteria]: https://mozilla.github.io/cargo-vet/audit-criteria.html
[wildcard audit entries]: https://mozilla.github.io/cargo-vet/wildcard-audit-entries.html
[Cargo.toml]: https://searchfox.org/mozilla-central/source/testing/geckodriver/Cargo.toml
[Cargo.lock]: https://searchfox.org/mozilla-central/source/Cargo.lock
[audits.toml]: https://searchfox.org/mozilla-central/source/supply-chain/audits.toml
## Update the change log
Notable changes to geckodriver are mentioned in [CHANGES.md]. Many
users rely on this, so it's important that you make it **relevant
to end-users**. For example, we only mention changes that are visible
to users. The change log is not a complete anthology of commits,
as these often will not convey the essence of a change to end-users.
If a feature was added but removed before release, there is no reason
to list it as a change.
It is good practice to also include relevant information from the
[webdriver], [marionette], [rust-mozrunner], and [rust-mozdevice] crates,
since these are the most important dependencies of geckodriver and a lot
of its functionality is implemented there.
To get a list of all the changes for one of the above crates, one of the following
commands can be used:
```shell
% hg log -M -r <revision>::central --template "{node|short}\t{desc|firstline}\n" <path>
% git log --reverse $(git cinnabar hg2git <revision>)..HEAD --pretty="%s" <path>
```
where `<revision>` is the changeset of the last geckodriver release and `<path>`
the location of the crate in the repository.
Add the list of changes to the related release bug on Bugzilla, and also check the
dependency list of the bug for other fixes that are worth mentioning.
We follow the writing style of the existing change log, with
one section per version (with a release date), with subsections
_Added_, _Changed_, _Fixed_, and _Removed_. If the targeted
Firefox or Selenium versions have changed, it is good to make a
mention of this. Lines are optimally formatted at roughly 72 columns
to make the file readable in a text editor as well as rendered HTML.
fmt(1) does a splendid job at text formatting.
[CHANGES.md]: https://searchfox.org/mozilla-central/source/testing/geckodriver/CHANGES.md
[webdriver]: https://searchfox.org/mozilla-central/source/testing/webdriver
[marionette]: https://searchfox.org/mozilla-central/source/testing/geckodriver/marionette
[rust-mozrunner]: https://searchfox.org/mozilla-central/source/testing/mozbase/rust/mozrunner
[rust-mozdevice]: https://searchfox.org/mozilla-central/source/testing/mozbase/rust/mozdevice
## Bump the version number and update the support page
Bump the version number in [Cargo.toml] to the next version.
geckodriver follows [semantic versioning] so it's a good idea to
familiarise yourself with that before deciding on the version number.
After you've changed the version number, run
```shell
% ./mach build testing/geckodriver
```
again to update [Cargo.lock].
Now update the [support page] by adding a new row to the versions table,
including the required versions of Selenium and Firefox.
Finally commit all those changes.
[semantic versioning]: http://semver.org/
[support page]: https://searchfox.org/mozilla-central/source/testing/geckodriver/doc/Support.md
## Add the changeset id
To easily allow a release build of geckodriver after cloning the
repository, the changeset id for the release has to be added to the
change log. Therefore, add a final placeholder commit to the patch
series so that it can be put up for review in advance.
Once all previous revisions of the patch series have landed and been merged
to `mozilla-central`, the changeset id from the merge commit has to be picked for
finalizing the change log. This specific id is needed because Taskcluster creates
the final signed builds based on that merge.
## Release new in-tree dependency crates
Make sure to wait until the complete patch series from above has been
merged to mozilla-central. Then continue with the following steps.
Before releasing geckodriver all dependency crates as
[updated earlier](#update-in-tree-dependency-crates) have to be
released first.
Therefore, change into each of the directories for crates with an update
and run the following command to publish the crate:
```shell
% cargo publish
```
Note that if a crate has an in-tree dependency, make sure to first
change the dependency information.
Do not release the geckodriver crate yet!
Once all crates have been published, check the `target/package/` folder under
the root of the mozilla-central repository and remove all the folders related
to the packages published above (this saves roughly 1 GB of disk space).
## Export to GitHub
The canonical GitHub repository is <https://github.com/mozilla/geckodriver.git>
so make sure you have a local clone of that. It has three branches:
_master_ which only contains the [README.md]; _old_ which was the
state of the project when it was exported to mozilla-central; and
_release_, from where releases are made.
Before we copy the code over to the GitHub repository we need to
check out the [release commit that bumped the version number](#add-the-changeset-id)
on mozilla-central:
```shell
% hg update $RELEASE_REVISION
```
Or:
```shell
% git checkout $(git cinnabar hg2git $RELEASE_REVISION)
```
We will now export the contents of [testing/geckodriver] to a new branch that
is based on the _release_ branch, which will be used to create a pull request:
```shell
% cd $SRC/geckodriver
% git checkout release
% git pull
% git checkout -b do_release_X.Y.Z
% git rm -rf .
% git clean -fxd
% cp -r $SRC/gecko/testing/geckodriver/* .
```
Now verify that geckodriver builds correctly by running:
```shell
% cargo build
```
[README.md]: https://searchfox.org/mozilla-central/source/testing/geckodriver/README.md
[testing/geckodriver]: https://searchfox.org/mozilla-central/source/testing/geckodriver
## Commit local changes
Now commit all the changes you have made locally to the _release_ branch.
It is recommended to set up a [GPG key] for signing the commit, so
that the release commit is marked as `verified`.
```shell
% git add . -- ':!mach_commands.py :!moz.build :!target/*'
% git commit -S -am "Import of vX.Y.Z"   # signed
```
or if you cannot use signing use:
```shell
% git add . -- ':!mach_commands.py :!moz.build :!target/*'
% git commit -am "Import of vX.Y.Z"   # unsigned
```
Then push the changes, and create a pull request:
```shell
% git push origin do_release_X.Y.Z
```
As indicated above, the changes you make to this branch will not
be upstreamed back into mozilla-central. It is merely used as a
place for external consumers to build their own version of geckodriver.
[GPG key]: https://help.github.com/articles/signing-commits/
## Make the release
geckodriver needs to be manually released on github.com. Therefore start to
[draft a new release], and make the following changes:
1. Specify the "Tag version", and select "Release" as target.
2. Leave the release title empty.
3. Paste the raw Markdown source from [CHANGES.md] into the description field.
This will highlight for end-users what changes were made in that particular
package when they visit the GitHub downloads section. Make sure to check that
all references can be resolved, and if not make sure to add those too.
4. Find the signed geckodriver archives in the [taskcluster index] by
replacing %changeset% with the full release changeset id. Rename the
individual files so the basename looks like 'geckodriver-v%version%-%platform%'.
Upload them all, including the checksum files for the Linux platforms.
5. Before announcing the release on GitHub publish the geckodriver crate as well
on crates.io by running `cargo publish` from the release branch.
6. Send the release announcement to the [dev-webdriver] mailing list.
[draft a new release]: https://github.com/mozilla/geckodriver/releases/new
[taskcluster index]: https://firefox-ci-tc.services.mozilla.com/tasks/index/gecko.v2.mozilla-central.revision.%changeset%.geckodriver
[dev-webdriver]: https://groups.google.com/a/mozilla.org/g/dev-webdriver
Congratulations! You've released geckodriver!

@ -0,0 +1,183 @@
<!-- markdownlint-disable MD033 -->
# Supported platforms
The following table shows a mapping between [geckodriver releases],
and required versions of Selenium and Firefox:
<style type="text/css">
table { width: 100%; margin-bottom: 2em; }
table, th, td { border: solid gray 1px; }
td, th { padding: 5px 10px; text-align: center; }
</style>
<table>
<thead>
<tr>
<th rowspan="2">geckodriver
<th rowspan="2">Selenium
<th colspan="2">Firefox
</tr>
<tr>
<th>min
<th>max
</tr>
</thead>
<tr>
<td>0.33.0
<td>≥ 3.11 (3.14 Python)
<td>102 ESR
<td>n/a
<tr>
<td>0.32.2
<td>≥ 3.11 (3.14 Python)
<td>102 ESR
<td>n/a
<tr>
<td>0.32.1
<td>≥ 3.11 (3.14 Python)
<td>102 ESR
<td>n/a
<tr>
<td>0.32.0
<td>≥ 3.11 (3.14 Python)
<td>102 ESR
<td>n/a
<tr>
<td>0.31.0
<td>≥ 3.11 (3.14 Python)
<td>91 ESR
<td>n/a
<tr>
<td>0.30.0
<td>≥ 3.11 (3.14 Python)
<td>78 ESR
<td>90
<tr>
<td>0.29.1
<td>≥ 3.11 (3.14 Python)
<td>60
<td>90
<tr>
<td>0.29.0
<td>≥ 3.11 (3.14 Python)
<td>60
<td>90
<tr>
<td>0.28.0
<td>≥ 3.11 (3.14 Python)
<td>60
<td>90
<tr>
<td>0.27.0
<td>≥ 3.11 (3.14 Python)
<td>60
<td>90
<tr>
<td>0.26.0
<td>≥ 3.11 (3.14 Python)
<td>60
<td>90
<tr>
<td>0.25.0
<td>≥ 3.11 (3.14 Python)
<td>57
<td>90
<tr>
<td>0.24.0
<td>≥ 3.11 (3.14 Python)
<td>57
<td>79
<tr>
<td>0.23.0
<td>≥ 3.11 (3.14 Python)
<td>57
<td>79
<tr>
<td>0.22.0
<td>≥ 3.11 (3.14 Python)
<td>57
<td>79
<tr>
<td>0.21.0
<td>≥ 3.11 (3.14 Python)
<td>57
<td>79
<tr>
<td>0.20.1
<td>≥ 3.5
<td>55
<td>62
<tr>
<td>0.20.0
<td>≥ 3.5
<td>55
<td>62
<tr>
<td>0.19.1
<td>≥ 3.5
<td>55
<td>62
<tr>
<td>0.19.0
<td>≥ 3.5
<td>55
<td>62
<tr>
<td>0.18.0
<td>≥ 3.4
<td>53
<td>62
<tr>
<td>0.17.0
<td>≥ 3.4
<td>52
<td>62
</table>
## Clients
[Selenium] users must update to version 3.11 or later to use geckodriver.
Other clients that follow the [W3C WebDriver specification][WebDriver]
are also supported.
## Firefoxen
geckodriver is not yet feature complete. This means that it does
not yet offer full conformance with the [WebDriver] standard
or complete compatibility with [Selenium]. You can track the
[implementation status] of the latest [Firefox Nightly] on MDN.
We also keep track of known [Selenium], [remote protocol], and
[specification] problems in our [issue tracker].
Support is best in Firefox 57 and greater, although generally the more
recent the Firefox version, the better the experience as they have
more bug fixes and features. Some features will only be available
in the most recent Firefox versions, and we strongly advise using the
latest [Firefox Nightly] with geckodriver. Since Windows XP support
in Firefox was dropped with Firefox 53, we do not support this platform.
## Android
Starting with the 0.26.0 release geckodriver is able to connect
to Android devices, and to control packages which are based on [GeckoView]
(e.g. [Firefox Preview], aka Fenix, or [Firefox Reality]). But it also still
supports versions of Fennec up to 68 ESR, which is the last officially
supported release from Mozilla.
To run tests on Android, specific capabilities under `moz:firefoxOptions`
have to be set when requesting a new session. See the Android section under
[Firefox Capabilities](Capabilities.md#android) for more details.
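As a hedged sketch of such a session request at the wire level, assuming geckodriver is listening on its default port 4444; the package name is only an example, and additional Android capabilities may be required for your device setup:

```shell
% curl -H 'Content-Type: application/json' \
    -d '{"capabilities": {"alwaysMatch": {"moz:firefoxOptions": {"androidPackage": "org.mozilla.fenix"}}}}' \
    http://localhost:4444/session
```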
[geckodriver releases]: https://github.com/mozilla/geckodriver/releases
[Selenium]: https://github.com/seleniumhq/selenium
[WebDriver]: https://w3c.github.io/webdriver/
[implementation status]: https://bugzilla.mozilla.org/showdependencytree.cgi?id=721859&hide_resolved=1
[remote protocol]: https://github.com/mozilla/geckodriver/issues?q=is%3Aissue+is%3Aopen+label%3Amarionette
[specification]: https://github.com/mozilla/geckodriver/issues?q=is%3Aissue+is%3Aopen+label%3Aspec
[issue tracker]: https://github.com/mozilla/geckodriver/issues
[Firefox Nightly]: https://nightly.mozilla.org/
[GeckoView]: https://wiki.mozilla.org/Mobile/GeckoView
[Firefox Preview]: https://play.google.com/store/apps/details?id=org.mozilla.fenix
[Firefox Reality]: https://play.google.com/store/apps/details?id=org.mozilla.vrbrowser

@ -0,0 +1,69 @@
# Testing geckodriver
We verify and test geckodriver in a couple of different ways.
Since it is an implementation of the WebDriver web standard, we share
a set of conformance tests with other browser vendors through the
[Web Platform Tests] (WPT) initiative. This lets us ensure web
compatibility between _different_ WebDriver implementations for
different browsers.
In addition to the WPT tests, geckodriver and webdriver have
unit tests. These are written in Rust, but you must explicitly
tell mach to build these by adding the following line to your [mozconfig]:
```make
ac_add_options --enable-rust-tests
```
Tests can then be run by using the `test` sub command for [cargo] in the
specific source folder:
```shell
% cd testing/geckodriver/src
% cargo test
```
To run the more extensive WPT tests you can use mach, but first
make sure you have built Firefox:
```shell
% ./mach build
% ./mach wpt testing/web-platform/tests/webdriver
```
As these are functional integration tests and pop up Firefox windows
sporadically, a helpful tip is to suppress the window whilst you
are running them by using Firefox [headless mode]:
```shell
% ./mach wpt --headless testing/web-platform/tests/webdriver
```
The `--headless` flag is equivalent to setting the `MOZ_HEADLESS`
environment variable. In addition to `MOZ_HEADLESS` there is also
`MOZ_HEADLESS_WIDTH` and `MOZ_HEADLESS_HEIGHT` for controlling the
dimensions of the no-op virtual display. This is similar to using
Xvfb(1) which you may know from the X windowing system, but has
the additional benefit of also working on macOS and Windows.
As you get into the development of geckodriver and Marionette you will
increasingly grow to understand our love for [trace-level logs].
They provide us with the input—the HTTP requests—from the client
(in WPT's case from the tests' use of a custom WebDriver client),
the translation geckodriver makes to the [Marionette protocol],
the log output from Marionette, its responses back to geckodriver,
and finally the output—or the HTTP response—back to the client.
The [trace-level logs] can be surfaced by passing on the `-vv`
flag to geckodriver through WPT:
```shell
% ./mach wpt --webdriver-arg=-vv testing/web-platform/tests/webdriver
```
[Web Platform Tests]: http://web-platform-tests.org/
[cargo]: http://doc.crates.io/guide.html
[headless mode]: https://developer.mozilla.org/en-US/Firefox/Headless_mode
[mozconfig]: https://developer.mozilla.org/en-US/docs/Mozilla/Developer_guide/Build_Instructions/Configuring_Build_Options
[trace-level logs]: TraceLogs.md
[Marionette protocol]: /testing/marionette/Protocol.md

@ -0,0 +1,206 @@
# Enabling trace logs
geckodriver provides different bands of logs for different audiences.
The most important log entries are shown to everyone by default,
and these include which port geckodriver provides the WebDriver
API on, as well as informative warnings, errors, and fatal exceptions.
The different log bands are, in ascending bandwidth:
1. `fatal` is reserved for exceptional circumstances when geckodriver
or Firefox cannot recover. This usually entails that either
one or both of the processes will exit.
2. `error` messages are mistakes in the program code which it is
possible to recover from.
3. `warn` shows warnings of more informative nature that are not
necessarily problems in geckodriver. This could for example happen
if you use the legacy `desiredCapabilities`/`requiredCapabilities`
objects instead of the new `alwaysMatch`/`firstMatch` structures.
4. `info` (default) contains information about which port geckodriver
binds to, but also all messages from the lower-bandwidth levels
listed above.
5. `config` additionally shows the negotiated capabilities after
matching the `alwaysMatch` capabilities with the sequence of
`firstMatch` capabilities.
6. `debug` is reserved for information that is useful when programming.
7. `trace`, where in addition to itself, all previous levels
are included. The trace level shows all HTTP requests received
by geckodriver, packets sent to and from the remote protocol in
Firefox, and responses sent back to your client.
In other words this means that the configured level will coalesce
entries from all lower bands including itself. If you set the log
level to `error`, you will get log entries for both `fatal` and `error`.
Similarly for `trace`, you will get all the logs that are offered.
To help debug a problem with geckodriver or Firefox, the trace-level
output is vital to understand what is going on. This is why we ask
that trace logs are included when filing bugs against geckodriver.
It is only under very special circumstances that a trace log is
not needed, so you will normally find that our first action when
triaging your issue will be to ask you to include one. Do yourself
and us a favour and provide a trace-level log right away.
To silence geckodriver altogether you may for example either redirect
all output to append to some log files:
```shell
% geckodriver >>geckodriver.log 2>>geckodriver.err.log
```
Or a black hole somewhere:
```shell
% geckodriver >/dev/null 2>&1
```
The log level set for geckodriver is propagated to the Marionette
logger in Firefox. Marionette is the remote protocol that geckodriver
uses to implement WebDriver. This means enabling trace logs for
geckodriver will also implicitly enable them for Marionette.
The log level is set in different ways. Either by using the
`--log <LEVEL>` option, where `LEVEL` is one of the log levels
from the list above, or by using the `-v` (for debug) or `-vv`
(for trace) shorthands. For example, the following command will
enable trace logs for both geckodriver and Marionette:
```shell
% geckodriver -vv
```
The second way of setting the log level is through capabilities.
geckodriver accepts a Mozilla-specific configuration object
in [`moz:firefoxOptions`]. This JSON Object, which is further
described in the [README] can hold Firefox-specific configuration,
such as which Firefox binary to use, additional preferences to set,
and of course which log level to use.
[`moz:firefoxOptions`]: https://searchfox.org/mozilla-central/source/testing/geckodriver/README.md#firefox-capabilities
[README]: https://searchfox.org/mozilla-central/source/testing/geckodriver/README.md
Each client has its own way of specifying capabilities, and some clients
include “helpers” for providing browser-specific configuration.
It is often advisable to use these helpers instead of encoding the
JSON Object yourself because it can be difficult to get the exact
details right, but if you choose to, it should look like this:
```json
{"moz:firefoxOptions": {"log": {"level": "trace"}}}
```
Note that most known WebDriver clients, such as those provided by
the Selenium project, do not expose a way to actually _see_ the logs
unless you redirect the log output to a particular file (using the
method shown above) or let the client “inherit” geckodriver's
output, for example by redirecting the stdout and stderr streams to
its own. The notable exceptions are the Python and Ruby bindings,
which surface geckodriver logs in a remarkably easy and efficient way.
See the client-specific documentation below for the most idiomatic
way to enable trace logs in your language. We want to expand this
documentation to cover all the best known clients people use with
geckodriver. If you find your language missing, please consider
[submitting a patch].
[submitting a patch]: Patches.md
## C-Sharp
The Selenium [C# client] comes with a [`FirefoxOptions`] helper for
constructing the [`moz:firefoxOptions`] capabilities object:
```csharp
FirefoxOptions options = new FirefoxOptions();
options.LogLevel = FirefoxDriverLogLevel.Trace;
IWebDriver driver = new FirefoxDriver(options);
```
The log output is directed to stdout.
[C# client]: https://seleniumhq.github.io/selenium/docs/api/dotnet/
[`FirefoxOptions`]: https://seleniumhq.github.io/selenium/docs/api/dotnet/html/T_OpenQA_Selenium_Firefox_FirefoxOptions.htm
## Java
The Selenium [Java client] also comes with
a [`org.openqa.selenium.firefox.FirefoxOptions`] helper for
constructing the [`moz:firefoxOptions`] capabilities object:
```java
FirefoxOptions options = new FirefoxOptions();
options.setLogLevel(FirefoxDriverLogLevel.TRACE);
WebDriver driver = new FirefoxDriver(options);
```
The log output is directed to stdout.
[Java client]: https://seleniumhq.github.io/selenium/docs/api/java/
[`org.openqa.selenium.firefox.FirefoxOptions`]: https://seleniumhq.github.io/selenium/docs/api/java/org/openqa/selenium/firefox/FirefoxOptions.html
## JavaScript (webdriver.io)
With the [JavaScript client] (webdriver.io) the capabilities object can be
constructed directly:
```javascript
import WebDriver from 'webdriver'
const driver = await WebDriver.newSession({
capabilities: {
browserName: 'firefox',
'moz:firefoxOptions': {
log: { level: 'trace' },
}
}
})
```
The log output is directed to stdout, or if geckodriver runs as a wdio plugin
then the generated logs are part of the wdio log system.
[JavaScript client]: https://webdriver.io/
## Python
The Selenium [Python client] comes with a
[`selenium.webdriver.firefox.options.Options`] helper that can
be used programmatically to construct the [`moz:firefoxOptions`]
capabilities object:
```python
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.options import Options
opts = Options()
opts.log.level = "trace"
driver = Firefox(options=opts)
```
The log output is stored in a file called _geckodriver.log_ in your
script's current working directory.
[Python client]: https://selenium-python.readthedocs.io/
[`selenium.webdriver.firefox.options.Options`]: https://github.com/SeleniumHQ/selenium/blob/master/py/selenium/webdriver/firefox/options.py
## Ruby
The Selenium [Ruby client] comes with an [`Options`] helper to
generate the correct [`moz:firefoxOptions`] capabilities object:
```ruby
Selenium::WebDriver.logger.level = :debug
opts = Selenium::WebDriver::Firefox::Options.new(log_level: :trace)
driver = Selenium::WebDriver.for :firefox, options: opts
```
The log output is directed to stdout.
[Ruby client]: https://seleniumhq.github.io/selenium/docs/api/rb/
[`Options`]: https://seleniumhq.github.io/selenium/docs/api/rb/Selenium/WebDriver/Firefox/Options.html

@ -0,0 +1,143 @@
# Usage
geckodriver is an implementation of WebDriver, and WebDriver can
be used for widely different purposes. How you invoke geckodriver
largely depends on your use case.
## Running Firefox in a container-based package
When Firefox is packaged inside a container (e.g. [Snap], [Flatpak]), it may
see a different filesystem to the host. This can affect access to the generated
profile directory, which may result in a hang when starting Firefox.
This is known to affect launching the default Firefox shipped with Ubuntu 22.04+.
There are several workarounds available for this problem:
- Do not use container-packaged Firefox builds with geckodriver. Instead
download a Firefox release from <https://download.mozilla.org/?product=firefox-latest&os=linux>
and a geckodriver release from <https://github.com/mozilla/geckodriver/releases>.
- Use a geckodriver that runs in the same container filesystem as the Firefox
package. For example on Ubuntu `/snap/bin/geckodriver` will work with the
default Firefox.
- Set the `--profile-root` command line option to write the profile to a
directory accessible to both Firefox and geckodriver, for example a non-hidden
directory under `$HOME`.
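As a minimal sketch of the second workaround on Ubuntu, run the snap-packaged geckodriver so it shares the Firefox snap's view of the filesystem:

```shell
% /snap/bin/geckodriver
```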
[Flatpak]: https://flatpak.org/
[Snap]: https://ubuntu.com/core/services/guide/snaps-intro
## Selenium
If you are using geckodriver through [Selenium], you must ensure that
you have version 3.11 or greater. Because geckodriver implements the
[W3C WebDriver standard][WebDriver] and not the same Selenium wire
protocol older drivers are using, you may experience incompatibilities
and migration problems when making the switch from FirefoxDriver to
geckodriver.
Generally speaking, Selenium 3 enabled geckodriver as the default
WebDriver implementation for Firefox. With the release of Firefox 47,
FirefoxDriver had to be discontinued for its lack of support for the
[new multi-processing architecture in Gecko][e10s].
Selenium client bindings will pick up the _geckodriver_ binary executable
from your [system's `PATH` environment variable][PATH] unless you
override it by setting the `webdriver.gecko.driver` [Java VM system
property]:
```java
System.setProperty("webdriver.gecko.driver", "/home/user/bin");
```
Or by passing it as a flag to the [java(1)] launcher:
```shell
% java -Dwebdriver.gecko.driver=/home/user/bin YourApplication
```
Your mileage with this approach may vary based on which programming
language bindings you are using. In any case, geckodriver will generally
be picked up if it is available on the system path.
In a bash compatible shell, you can make other programs aware of its
location by exporting or setting the `PATH` variable:
```shell
% export PATH=$PATH:/home/user/bin
% whereis geckodriver
geckodriver: /home/user/bin/geckodriver
```
On Windows systems you can change the system path by right-clicking **My
Computer** and choosing **Properties**. In the dialogue that appears,
navigate to **Advanced** → **Environment Variables** → **Path**.
Or in the Windows console window:
```shell
% set PATH=%PATH%;C:\bin\geckodriver
```
## Standalone
Since geckodriver is a separate HTTP server that is a complete remote end
implementation of [WebDriver], it is possible to avoid using the Selenium
remote server if you have no requirements to distribute processes across
a matrix of systems.
Given a W3C WebDriver conforming client library (or _local end_) you
may interact with the geckodriver HTTP server as if you were speaking
to any Selenium server.
Using [curl(1)]:
```shell
% geckodriver &
[1] 16010
% 1491834109194 geckodriver INFO Listening on 127.0.0.1:4444
% curl -H 'Content-Type: application/json' -d '{"capabilities": {"alwaysMatch": {"acceptInsecureCerts": true}}}' http://localhost:4444/session
{"value":{"sessionId":"d4605710-5a4e-4d64-a52a-778bb0c31e00","capabilities":{"acceptInsecureCerts":true,[...]}}}
% curl -H 'Content-Type: application/json' -d '{"url": "https://mozilla.org"}' http://localhost:4444/session/d4605710-5a4e-4d64-a52a-778bb0c31e00/url
{}
% curl http://localhost:4444/session/d4605710-5a4e-4d64-a52a-778bb0c31e00/url
{"value":"https://www.mozilla.org/en-US/"
% curl -X DELETE http://localhost:4444/session/d4605710-5a4e-4d64-a52a-778bb0c31e00
{}
% fg
geckodriver
^C
```
Using the Python [wdclient] library:
```python
import webdriver
with webdriver.Session("127.0.0.1", 4444) as session:
    session.url = "https://mozilla.org"
    print("The current URL is %s" % session.url)
```
And to run:
```shell
% geckodriver &
[1] 16054
% python example.py
1491835308354 geckodriver INFO Listening on 127.0.0.1:4444
The current URL is https://www.mozilla.org/en-US/
% fg
geckodriver
^C
```
[Selenium]: http://seleniumhq.org/
[e10s]: https://developer.mozilla.org/en-US/Firefox/Multiprocess_Firefox
[PATH]: https://en.wikipedia.org/wiki/PATH_(variable)
[Java VM system property]: http://docs.oracle.com/javase/tutorial/essential/environment/sysprop.html
[java(1)]: http://www.manpagez.com/man/1/java/
[WebDriver]: https://w3c.github.io/webdriver/
[curl(1)]: http://www.manpagez.com/man/1/curl/
[wdclient]: https://github.com/web-platform-tests/wpt/tree/master/tools/webdriver

@ -0,0 +1,55 @@
===========
geckodriver
===========
Proxy for using W3C WebDriver-compatible clients to interact with
Gecko-based browsers.
This program provides the HTTP API described by the `WebDriver protocol`_
to communicate with Gecko browsers, such as Firefox. It translates calls
into the :ref:`Firefox remote protocol <Protocol>` by acting as a proxy between the local-
and remote ends.
You can consult the `change log`_ for a record of all notable changes
to the program. Releases_ are made available on GitHub.
.. _WebDriver protocol: https://w3c.github.io/webdriver/#protocol
.. _change log: https://github.com/mozilla/geckodriver/releases
.. _Releases: https://github.com/mozilla/geckodriver/releases
.. toctree::
:maxdepth: 1
Support.md
WebDriver capabilities <https://developer.mozilla.org/en-US/docs/Web/WebDriver/Capabilities>
Capabilities.md
Usage.md
Flags.md
Profiles.md
Bugs.md
TraceLogs.md
CrashReports.md
Notarization.md
For developers
==============
.. toctree::
:maxdepth: 1
Building.md
Testing.md
Patches.md
Releasing.md
ARM.md
Communication
=============
The mailing list for geckodriver discussion is
https://groups.google.com/a/mozilla.org/g/dev-webdriver.
If you prefer real-time chat, ask your questions
on `#webdriver:mozilla.org <https://chat.mozilla.org/#/room/#webdriver:mozilla.org>`__.

@ -0,0 +1,14 @@
[package]
name = "marionette"
version = "0.4.0"
authors = ["Mozilla"]
description = "Library implementing the client side of Gecko's Marionette remote automation protocol."
edition = "2018"
keywords = ["mozilla", "firefox", "marionette", "webdriver"]
license = "MPL-2.0"
repository = "https://hg.mozilla.org/mozilla-central/file/tip/testing/geckodriver/marionette"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_repr = "0.1"

@ -0,0 +1,240 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use serde::ser::SerializeMap;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BoolValue {
value: bool,
}
impl BoolValue {
pub fn new(val: bool) -> Self {
BoolValue { value: val }
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Cookie {
pub name: String,
pub value: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub path: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub domain: Option<String>,
#[serde(default)]
pub secure: bool,
#[serde(default, rename = "httpOnly")]
pub http_only: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub expiry: Option<Date>,
#[serde(skip_serializing_if = "Option::is_none", rename = "sameSite")]
pub same_site: Option<String>,
}
pub fn to_cookie<T, S>(data: T, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: Serialize,
{
#[derive(Serialize)]
struct Wrapper<T> {
cookie: T,
}
Wrapper { cookie: data }.serialize(serializer)
}
pub fn from_cookie<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
D: Deserializer<'de>,
T: serde::de::DeserializeOwned,
T: std::fmt::Debug,
{
#[derive(Debug, Deserialize)]
struct Wrapper<T> {
cookie: T,
}
let w = Wrapper::deserialize(deserializer)?;
Ok(w.cookie)
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Date(pub u64);
#[derive(Clone, Debug, PartialEq)]
pub enum Frame {
Index(u16),
Element(String),
Parent,
}
impl Serialize for Frame {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_map(Some(1))?;
match self {
Frame::Index(nth) => map.serialize_entry("id", nth)?,
Frame::Element(el) => map.serialize_entry("element", el)?,
Frame::Parent => map.serialize_entry("id", &Value::Null)?,
}
map.end()
}
}
impl<'de> Deserialize<'de> for Frame {
fn deserialize<D>(deserializer: D) -> Result<Frame, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Debug, Deserialize)]
#[serde(rename_all = "lowercase")]
struct JsonFrame {
id: Option<u16>,
element: Option<String>,
}
let json = JsonFrame::deserialize(deserializer)?;
match (json.id, json.element) {
(Some(_id), Some(_element)) => Err(de::Error::custom("conflicting frame identifiers")),
(Some(id), None) => Ok(Frame::Index(id)),
(None, Some(element)) => Ok(Frame::Element(element)),
(None, None) => Ok(Frame::Parent),
}
}
}
// TODO(nupur): Bug 1567165 - Make WebElement in Marionette a unit struct
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebElement {
#[serde(rename = "element-6066-11e4-a52e-4f735466cecf")]
pub element: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Timeouts {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub implicit: Option<u64>,
#[serde(default, rename = "pageLoad", skip_serializing_if = "Option::is_none")]
pub page_load: Option<u64>,
#[serde(default, skip_serializing_if = "Option::is_none")]
#[allow(clippy::option_option)]
pub script: Option<Option<u64>>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Window {
pub handle: String,
}
pub fn to_name<T, S>(data: T, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: Serialize,
{
#[derive(Serialize)]
struct Wrapper<T> {
name: T,
}
Wrapper { name: data }.serialize(serializer)
}
pub fn from_name<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
D: Deserializer<'de>,
T: serde::de::DeserializeOwned,
T: std::fmt::Debug,
{
#[derive(Debug, Deserialize)]
struct Wrapper<T> {
name: T,
}
let w = Wrapper::deserialize(deserializer)?;
Ok(w.name)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test::{assert_de, assert_ser, assert_ser_de, ELEMENT_KEY};
use serde_json::json;
#[test]
fn test_cookie_default_values() {
let data = Cookie {
name: "hello".into(),
value: "world".into(),
path: None,
domain: None,
secure: false,
http_only: false,
expiry: None,
same_site: None,
};
assert_de(&data, json!({"name":"hello", "value":"world"}));
}
#[test]
fn test_json_frame_index() {
assert_ser_de(&Frame::Index(1234), json!({"id": 1234}));
}
#[test]
fn test_json_frame_element() {
assert_ser_de(&Frame::Element("elem".into()), json!({"element": "elem"}));
}
#[test]
fn test_json_frame_parent() {
assert_ser_de(&Frame::Parent, json!({ "id": null }));
}
#[test]
fn test_web_element() {
let data = WebElement {
element: "foo".into(),
};
assert_ser_de(&data, json!({ELEMENT_KEY: "foo"}));
}
#[test]
fn test_timeouts_with_all_params() {
let data = Timeouts {
implicit: Some(1000),
page_load: Some(200000),
script: Some(Some(60000)),
};
assert_ser_de(
&data,
json!({"implicit":1000,"pageLoad":200000,"script":60000}),
);
}
#[test]
fn test_timeouts_with_missing_params() {
let data = Timeouts {
implicit: Some(1000),
page_load: None,
script: None,
};
assert_ser_de(&data, json!({"implicit":1000}));
}
#[test]
fn test_timeouts_setting_script_none() {
let data = Timeouts {
implicit: Some(1000),
page_load: None,
script: Some(None),
};
assert_ser(&data, json!({"implicit":1000, "script":null}));
}
}

@ -0,0 +1,184 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::error;
use std::fmt;
use serde::{Deserialize, Serialize};
#[derive(Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
#[serde(untagged)]
pub(crate) enum Error {
Marionette(MarionetteError),
}
impl Error {
pub fn kind(&self) -> ErrorKind {
match *self {
Error::Marionette(ref err) => err.kind,
}
}
}
impl fmt::Debug for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::Marionette(ref err) => fmt
.debug_struct("Marionette")
.field("kind", &err.kind)
.field("message", &err.message)
.field("stacktrace", &err.stack.clone())
.finish(),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::Marionette(ref err) => write!(fmt, "{}: {}", err.kind, err.message),
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
match self {
Error::Marionette(_) => self.kind().as_str(),
}
}
}
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
pub struct MarionetteError {
#[serde(rename = "error")]
pub kind: ErrorKind,
#[serde(default = "empty_string")]
pub message: String,
#[serde(rename = "stacktrace", default = "empty_string")]
pub stack: String,
}
fn empty_string() -> String {
"".to_owned()
}
impl From<MarionetteError> for Error {
fn from(error: MarionetteError) -> Error {
Error::Marionette(error)
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
pub enum ErrorKind {
#[serde(rename = "element click intercepted")]
ElementClickIntercepted,
#[serde(rename = "element not accessible")]
ElementNotAccessible,
#[serde(rename = "element not interactable")]
ElementNotInteractable,
#[serde(rename = "insecure certificate")]
InsecureCertificate,
#[serde(rename = "invalid argument")]
InvalidArgument,
#[serde(rename = "invalid cookie")]
InvalidCookieDomain,
#[serde(rename = "invalid element state")]
InvalidElementState,
#[serde(rename = "invalid selector")]
InvalidSelector,
#[serde(rename = "invalid session id")]
InvalidSessionId,
#[serde(rename = "javascript error")]
JavaScript,
#[serde(rename = "move target out of bounds")]
MoveTargetOutOfBounds,
#[serde(rename = "no such alert")]
NoSuchAlert,
#[serde(rename = "no such element")]
NoSuchElement,
#[serde(rename = "no such frame")]
NoSuchFrame,
#[serde(rename = "no such window")]
NoSuchWindow,
#[serde(rename = "script timeout")]
ScriptTimeout,
#[serde(rename = "session not created")]
SessionNotCreated,
#[serde(rename = "stale element reference")]
StaleElementReference,
#[serde(rename = "timeout")]
Timeout,
#[serde(rename = "unable to set cookie")]
UnableToSetCookie,
#[serde(rename = "unexpected alert open")]
UnexpectedAlertOpen,
#[serde(rename = "unknown command")]
UnknownCommand,
#[serde(rename = "unknown error")]
Unknown,
#[serde(rename = "unsupported operation")]
UnsupportedOperation,
#[serde(rename = "webdriver error")]
WebDriver,
}
impl ErrorKind {
pub(crate) fn as_str(self) -> &'static str {
use ErrorKind::*;
match self {
ElementClickIntercepted => "element click intercepted",
ElementNotAccessible => "element not accessible",
ElementNotInteractable => "element not interactable",
InsecureCertificate => "insecure certificate",
InvalidArgument => "invalid argument",
InvalidCookieDomain => "invalid cookie domain",
InvalidElementState => "invalid element state",
InvalidSelector => "invalid selector",
InvalidSessionId => "invalid session id",
JavaScript => "javascript error",
MoveTargetOutOfBounds => "move target out of bounds",
NoSuchAlert => "no such alert",
NoSuchElement => "no such element",
NoSuchFrame => "no such frame",
NoSuchWindow => "no such window",
ScriptTimeout => "script timeout",
SessionNotCreated => "session not created",
StaleElementReference => "stale element reference",
Timeout => "timeout",
UnableToSetCookie => "unable to set cookie",
UnexpectedAlertOpen => "unexpected alert open",
UnknownCommand => "unknown command",
Unknown => "unknown error",
UnsupportedOperation => "unsupported operation",
WebDriver => "webdriver error",
}
}
}
impl fmt::Display for ErrorKind {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{}", self.as_str())
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test::assert_ser_de;
use serde_json::json;
#[test]
fn test_json_error() {
let err = MarionetteError {
kind: ErrorKind::Timeout,
message: "".into(),
stack: "".into(),
};
assert_ser_de(
&err,
json!({"error": "timeout", "message": "", "stacktrace": ""}),
);
}
}

@ -0,0 +1,14 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
pub mod error;
pub mod common;
pub mod marionette;
pub mod message;
pub mod result;
pub mod webdriver;
#[cfg(test)]
mod test;

@ -0,0 +1,69 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use serde::{Deserialize, Serialize};
use crate::common::BoolValue;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
pub enum AppStatus {
eAttemptQuit,
eConsiderQuit,
eForceQuit,
eRestart,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Command {
#[serde(rename = "Marionette:AcceptConnections")]
AcceptConnections(BoolValue),
#[serde(rename = "Marionette:Quit")]
DeleteSession { flags: Vec<AppStatus> },
#[serde(rename = "Marionette:GetContext")]
GetContext,
#[serde(rename = "Marionette:GetScreenOrientation")]
GetScreenOrientation,
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test::assert_ser_de;
use serde_json::json;
#[test]
fn test_json_command_accept_connections() {
assert_ser_de(
&Command::AcceptConnections(BoolValue::new(false)),
json!({"Marionette:AcceptConnections": {"value": false }}),
);
}
#[test]
fn test_json_command_delete_session() {
let data = &Command::DeleteSession {
flags: vec![AppStatus::eForceQuit],
};
assert_ser_de(data, json!({"Marionette:Quit": {"flags": ["eForceQuit"]}}));
}
#[test]
fn test_json_command_get_context() {
assert_ser_de(&Command::GetContext, json!("Marionette:GetContext"));
}
#[test]
fn test_json_command_get_screen_orientation() {
assert_ser_de(
&Command::GetScreenOrientation,
json!("Marionette:GetScreenOrientation"),
);
}
#[test]
fn test_json_command_invalid() {
assert!(serde_json::from_value::<Command>(json!("foo")).is_err());
}
}

@ -0,0 +1,336 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use serde::de::{self, SeqAccess, Unexpected, Visitor};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::{Map, Value};
use serde_repr::{Deserialize_repr, Serialize_repr};
use std::fmt;
use crate::error::MarionetteError;
use crate::marionette;
use crate::result::MarionetteResult;
use crate::webdriver;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Command {
WebDriver(webdriver::Command),
Marionette(marionette::Command),
}
impl Command {
pub fn name(&self) -> String {
let (command_name, _) = self.first_entry();
command_name
}
fn params(&self) -> Value {
let (_, params) = self.first_entry();
params
}
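    // Serializes the command to JSON and extracts its single entry: the
    // serde-renamed variant name becomes the command name and the variant's
    // fields become the parameters. Unit-like commands serialize to a bare
    // string and get an empty parameter object instead.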
fn first_entry(&self) -> (String, serde_json::Value) {
match serde_json::to_value(self).unwrap() {
Value::String(cmd) => (cmd, Value::Object(Map::new())),
Value::Object(items) => {
let mut iter = items.iter();
let (cmd, params) = iter.next().unwrap();
(cmd.to_string(), params.clone())
}
_ => unreachable!(),
}
}
}
#[derive(Clone, Debug, PartialEq, Serialize_repr, Deserialize_repr)]
#[repr(u8)]
enum MessageDirection {
Incoming = 0,
Outgoing = 1,
}
pub type MessageId = u32;
#[derive(Debug, Clone, PartialEq)]
pub struct Request(pub MessageId, pub Command);
impl Request {
pub fn id(&self) -> MessageId {
self.0
}
pub fn command(&self) -> &Command {
&self.1
}
pub fn params(&self) -> Value {
self.command().params()
}
}
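// On the wire a request is the four-element array `[0, id, name, params]`,
// where 0 marks the message as incoming (a command sent to Marionette).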
impl Serialize for Request {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
(
MessageDirection::Incoming,
self.id(),
self.command().name(),
self.params(),
)
.serialize(serializer)
}
}
#[derive(Debug, PartialEq)]
pub enum Response {
Result {
id: MessageId,
result: MarionetteResult,
},
Error {
id: MessageId,
error: MarionetteError,
},
}
impl Serialize for Response {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Response::Result { id, result } => {
(MessageDirection::Outgoing, id, Value::Null, &result).serialize(serializer)
}
Response::Error { id, error } => {
(MessageDirection::Outgoing, id, &error, Value::Null).serialize(serializer)
}
}
}
}
#[derive(Debug, PartialEq, Serialize)]
#[serde(untagged)]
pub enum Message {
Incoming(Request),
Outgoing(Response),
}
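// Decodes the four-element message array `[direction, id, ..., ...]`: for an
// incoming message the remaining elements are the command name and its
// parameters; for an outgoing message they are an error (or null) followed by
// a result (or null).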
struct MessageVisitor;
impl<'de> Visitor<'de> for MessageVisitor {
type Value = Message;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("four-element array")
}
fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<Self::Value, A::Error> {
let direction = seq
.next_element::<MessageDirection>()?
.ok_or_else(|| de::Error::invalid_length(0, &self))?;
let id: MessageId = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(1, &self))?;
let msg = match direction {
MessageDirection::Incoming => {
let name: String = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(2, &self))?;
let params: Value = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(3, &self))?;
let command = match params {
Value::Object(ref items) if !items.is_empty() => {
let command_to_params = {
let mut m = Map::new();
m.insert(name, params);
Value::Object(m)
};
serde_json::from_value(command_to_params).map_err(de::Error::custom)
}
Value::Object(_) | Value::Null => {
serde_json::from_value(Value::String(name)).map_err(de::Error::custom)
}
x => Err(de::Error::custom(format!("unknown params type: {}", x))),
}?;
Message::Incoming(Request(id, command))
}
MessageDirection::Outgoing => {
let maybe_error: Option<MarionetteError> = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(2, &self))?;
let response = if let Some(error) = maybe_error {
seq.next_element::<Value>()?
.ok_or_else(|| de::Error::invalid_length(3, &self))?
.as_null()
.ok_or_else(|| de::Error::invalid_type(Unexpected::Unit, &self))?;
Response::Error { id, error }
} else {
let result: MarionetteResult = seq
.next_element()?
.ok_or_else(|| de::Error::invalid_length(3, &self))?;
Response::Result { id, result }
};
Message::Outgoing(response)
}
};
Ok(msg)
}
}
impl<'de> Deserialize<'de> for Message {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_seq(MessageVisitor)
}
}
#[cfg(test)]
mod tests {
use serde_json::json;
use super::*;
use crate::common::*;
use crate::error::{ErrorKind, MarionetteError};
use crate::test::assert_ser_de;
#[test]
fn test_incoming() {
let json =
json!([0, 42, "WebDriver:FindElement", {"using": "css selector", "value": "value"}]);
let find_element = webdriver::Command::FindElement(webdriver::Locator {
using: webdriver::Selector::Css,
value: "value".into(),
});
let req = Request(42, Command::WebDriver(find_element));
let msg = Message::Incoming(req);
assert_ser_de(&msg, json);
}
#[test]
fn test_incoming_empty_params() {
let json = json!([0, 42, "WebDriver:GetTimeouts", {}]);
let req = Request(42, Command::WebDriver(webdriver::Command::GetTimeouts));
let msg = Message::Incoming(req);
assert_ser_de(&msg, json);
}
#[test]
fn test_incoming_common_params() {
let json = json!([0, 42, "Marionette:AcceptConnections", {"value": false}]);
let params = BoolValue::new(false);
let req = Request(
42,
Command::Marionette(marionette::Command::AcceptConnections(params)),
);
let msg = Message::Incoming(req);
assert_ser_de(&msg, json);
}
#[test]
fn test_incoming_params_derived() {
assert!(serde_json::from_value::<Message>(
json!([0,42,"WebDriver:FindElement",{"using":"foo","value":"foo"}])
)
.is_err());
assert!(serde_json::from_value::<Message>(
json!([0,42,"Marionette:AcceptConnections",{"value":"foo"}])
)
.is_err());
}
#[test]
fn test_incoming_no_params() {
assert!(serde_json::from_value::<Message>(
json!([0,42,"WebDriver:GetTimeouts",{"value":true}])
)
.is_err());
assert!(serde_json::from_value::<Message>(
json!([0,42,"Marionette:Context",{"value":"foo"}])
)
.is_err());
assert!(serde_json::from_value::<Message>(
json!([0,42,"Marionette:GetScreenOrientation",{"value":true}])
)
.is_err());
}
#[test]
fn test_outgoing_result() {
let json = json!([1, 42, null, { "value": null }]);
let result = MarionetteResult::Null;
let msg = Message::Outgoing(Response::Result { id: 42, result });
assert_ser_de(&msg, json);
}
#[test]
fn test_outgoing_error() {
let json =
json!([1, 42, {"error": "no such element", "message": "", "stacktrace": ""}, null]);
let error = MarionetteError {
kind: ErrorKind::NoSuchElement,
message: "".into(),
stack: "".into(),
};
let msg = Message::Outgoing(Response::Error { id: 42, error });
assert_ser_de(&msg, json);
}
#[test]
fn test_invalid_type() {
assert!(
serde_json::from_value::<Message>(json!([2, 42, "WebDriver:GetTimeouts", {}])).is_err()
);
assert!(serde_json::from_value::<Message>(json!([3, 42, "no such element", {}])).is_err());
}
#[test]
fn test_missing_fields() {
// all fields are required
assert!(
serde_json::from_value::<Message>(json!([2, 42, "WebDriver:GetTimeouts"])).is_err()
);
assert!(serde_json::from_value::<Message>(json!([2, 42])).is_err());
assert!(serde_json::from_value::<Message>(json!([2])).is_err());
assert!(serde_json::from_value::<Message>(json!([])).is_err());
}
#[test]
fn test_unknown_command() {
assert!(serde_json::from_value::<Message>(json!([0, 42, "hooba", {}])).is_err());
}
#[test]
fn test_unknown_error() {
assert!(serde_json::from_value::<Message>(json!([1, 42, "flooba", {}])).is_err());
}
#[test]
fn test_message_id_bounds() {
let overflow = i64::from(std::u32::MAX) + 1;
let underflow = -1;
fn get_timeouts(message_id: i64) -> Value {
json!([0, message_id, "WebDriver:GetTimeouts", {}])
}
assert!(serde_json::from_value::<Message>(get_timeouts(overflow)).is_err());
assert!(serde_json::from_value::<Message>(get_timeouts(underflow)).is_err());
}
}

@ -0,0 +1,223 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use serde::de;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use crate::common::{Cookie, Timeouts, WebElement};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NewWindow {
handle: String,
#[serde(rename = "type")]
type_hint: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WindowRect {
pub x: i32,
pub y: i32,
pub width: i32,
pub height: i32,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ElementRect {
pub x: f64,
pub y: f64,
pub width: f64,
pub height: f64,
}
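// Untagged: on deserialization serde tries the variants below in declaration
// order and picks the first one that matches, so the order is significant.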
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum MarionetteResult {
#[serde(deserialize_with = "from_value", serialize_with = "to_value")]
Bool(bool),
#[serde(deserialize_with = "from_value", serialize_with = "to_empty_value")]
Null,
NewWindow(NewWindow),
WindowRect(WindowRect),
ElementRect(ElementRect),
#[serde(deserialize_with = "from_value", serialize_with = "to_value")]
String(String),
Strings(Vec<String>),
#[serde(deserialize_with = "from_value", serialize_with = "to_value")]
WebElement(WebElement),
WebElements(Vec<WebElement>),
Cookies(Vec<Cookie>),
Timeouts(Timeouts),
}
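// Helpers for the `{"value": ...}` envelope Marionette wraps around scalar
// results: `to_value`/`to_empty_value` add the wrapper on serialization and
// `from_value` strips it again on deserialization.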
fn to_value<T, S>(data: T, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
T: Serialize,
{
#[derive(Serialize)]
struct Wrapper<T> {
value: T,
}
Wrapper { value: data }.serialize(serializer)
}
fn to_empty_value<S>(serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
#[derive(Serialize)]
struct Wrapper {
value: Value,
}
Wrapper { value: Value::Null }.serialize(serializer)
}
fn from_value<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
D: Deserializer<'de>,
T: serde::de::DeserializeOwned,
T: std::fmt::Debug,
{
#[derive(Debug, Deserialize)]
struct Wrapper<T> {
value: T,
}
let v = Value::deserialize(deserializer)?;
if v.is_object() {
let w = serde_json::from_value::<Wrapper<T>>(v).map_err(de::Error::custom)?;
Ok(w.value)
} else {
Err(de::Error::custom("Cannot be deserialized to struct"))
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test::{assert_de, assert_ser_de, ELEMENT_KEY};
use serde_json::json;
#[test]
fn test_boolean_response() {
assert_ser_de(&MarionetteResult::Bool(true), json!({"value": true}));
}
#[test]
fn test_cookies_response() {
let mut data = Vec::new();
data.push(Cookie {
name: "foo".into(),
value: "bar".into(),
path: Some("/common".into()),
domain: Some("web-platform.test".into()),
secure: false,
http_only: false,
expiry: None,
same_site: Some("Strict".into()),
});
assert_ser_de(
&MarionetteResult::Cookies(data),
json!([{"name":"foo","value":"bar","path":"/common","domain":"web-platform.test","secure":false,"httpOnly":false,"sameSite":"Strict"}]),
);
}
#[test]
fn test_new_window_response() {
let data = NewWindow {
handle: "6442450945".into(),
type_hint: "tab".into(),
};
let json = json!({"handle": "6442450945", "type": "tab"});
assert_ser_de(&MarionetteResult::NewWindow(data), json);
}
#[test]
fn test_web_element_response() {
let data = WebElement {
element: "foo".into(),
};
assert_ser_de(
&MarionetteResult::WebElement(data),
json!({"value": {ELEMENT_KEY: "foo"}}),
);
}
#[test]
fn test_web_elements_response() {
let data = vec![
WebElement {
element: "foo".into(),
},
WebElement {
element: "bar".into(),
},
];
assert_ser_de(
&MarionetteResult::WebElements(data),
json!([{ELEMENT_KEY: "foo"}, {ELEMENT_KEY: "bar"}]),
);
}
#[test]
fn test_timeouts_response() {
let data = Timeouts {
implicit: Some(1000),
page_load: Some(200000),
script: Some(Some(60000)),
};
assert_ser_de(
&MarionetteResult::Timeouts(data),
json!({"implicit":1000,"pageLoad":200000,"script":60000}),
);
}
#[test]
fn test_string_response() {
assert_ser_de(
&MarionetteResult::String("foo".into()),
json!({"value": "foo"}),
);
}
#[test]
fn test_strings_response() {
assert_ser_de(
&MarionetteResult::Strings(vec!["2147483649".to_string()]),
json!(["2147483649"]),
);
}
#[test]
fn test_null_response() {
assert_ser_de(&MarionetteResult::Null, json!({ "value": null }));
}
#[test]
fn test_window_rect_response() {
let data = WindowRect {
x: 100,
y: 100,
width: 800,
height: 600,
};
let json = json!({"x": 100, "y": 100, "width": 800, "height": 600});
assert_ser_de(&MarionetteResult::WindowRect(data), json);
}
#[test]
fn test_element_rect_response() {
let data = ElementRect {
x: 8.0,
y: 8.0,
width: 148.6666717529297,
height: 22.0,
};
let json = json!({"x": 8, "y": 8, "width": 148.6666717529297, "height": 22});
assert_de(&MarionetteResult::ElementRect(data), json);
}
}

@ -0,0 +1,35 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
pub static ELEMENT_KEY: &'static str = "element-6066-11e4-a52e-4f735466cecf";
pub fn assert_ser_de<T>(data: &T, json: serde_json::Value)
where
T: std::fmt::Debug,
T: std::cmp::PartialEq,
T: serde::de::DeserializeOwned,
T: serde::Serialize,
{
assert_eq!(serde_json::to_value(data).unwrap(), json);
assert_eq!(data, &serde_json::from_value::<T>(json).unwrap());
}
#[allow(dead_code)]
pub fn assert_ser<T>(data: &T, json: serde_json::Value)
where
T: std::fmt::Debug,
T: std::cmp::PartialEq,
T: serde::Serialize,
{
assert_eq!(serde_json::to_value(data).unwrap(), json);
}
pub fn assert_de<T>(data: &T, json: serde_json::Value)
where
T: std::fmt::Debug,
T: std::cmp::PartialEq,
T: serde::de::DeserializeOwned,
{
assert_eq!(data, &serde_json::from_value::<T>(json).unwrap());
}

@ -0,0 +1,512 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::common::{from_cookie, from_name, to_cookie, to_name, Cookie, Frame, Timeouts, Window};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Url {
pub url: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Locator {
pub using: Selector,
pub value: String,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Selector {
#[serde(rename = "css selector")]
Css,
#[serde(rename = "link text")]
LinkText,
#[serde(rename = "partial link text")]
PartialLinkText,
#[serde(rename = "tag name")]
TagName,
#[serde(rename = "xpath")]
XPath,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NewWindow {
#[serde(rename = "type", skip_serializing_if = "Option::is_none")]
pub type_hint: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WindowRect {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub x: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub y: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub width: Option<i32>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub height: Option<i32>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Keys {
pub text: String,
pub value: Vec<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(default, rename_all = "camelCase")]
pub struct PrintParameters {
pub orientation: PrintOrientation,
pub scale: f64,
pub background: bool,
pub page: PrintPage,
pub margin: PrintMargins,
pub page_ranges: Vec<String>,
pub shrink_to_fit: bool,
}
impl Default for PrintParameters {
fn default() -> Self {
PrintParameters {
orientation: PrintOrientation::default(),
scale: 1.0,
background: false,
page: PrintPage::default(),
margin: PrintMargins::default(),
page_ranges: Vec::new(),
shrink_to_fit: true,
}
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum PrintOrientation {
Landscape,
Portrait,
}
impl Default for PrintOrientation {
fn default() -> Self {
PrintOrientation::Portrait
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrintPage {
pub width: f64,
pub height: f64,
}
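// Default page size is US Letter (8.5in x 11in) expressed in centimetres,
// matching the WebDriver Print command defaults.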
impl Default for PrintPage {
fn default() -> Self {
PrintPage {
width: 21.59,
height: 27.94,
}
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrintMargins {
pub top: f64,
pub bottom: f64,
pub left: f64,
pub right: f64,
}
impl Default for PrintMargins {
fn default() -> Self {
PrintMargins {
top: 1.0,
bottom: 1.0,
left: 1.0,
right: 1.0,
}
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ScreenshotOptions {
pub id: Option<String>,
pub highlights: Vec<Option<String>>,
pub full: bool,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Script {
pub script: String,
pub args: Option<Vec<Value>>,
}
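// Several variants deliberately share a serde rename (for example the three
// screenshot variants all map to "WebDriver:TakeScreenshot"); they differ
// only in the parameters they carry.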
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Command {
#[serde(rename = "WebDriver:AcceptAlert")]
AcceptAlert,
#[serde(
rename = "WebDriver:AddCookie",
serialize_with = "to_cookie",
deserialize_with = "from_cookie"
)]
AddCookie(Cookie),
#[serde(rename = "WebDriver:CloseWindow")]
CloseWindow,
#[serde(
rename = "WebDriver:DeleteCookie",
serialize_with = "to_name",
deserialize_with = "from_name"
)]
DeleteCookie(String),
#[serde(rename = "WebDriver:DeleteAllCookies")]
DeleteCookies,
#[serde(rename = "WebDriver:DeleteSession")]
DeleteSession,
#[serde(rename = "WebDriver:DismissAlert")]
DismissAlert,
#[serde(rename = "WebDriver:ElementClear")]
ElementClear { id: String },
#[serde(rename = "WebDriver:ElementClick")]
ElementClick { id: String },
#[serde(rename = "WebDriver:ElementSendKeys")]
ElementSendKeys {
id: String,
text: String,
value: Vec<String>,
},
#[serde(rename = "WebDriver:ExecuteAsyncScript")]
ExecuteAsyncScript(Script),
#[serde(rename = "WebDriver:ExecuteScript")]
ExecuteScript(Script),
#[serde(rename = "WebDriver:FindElement")]
FindElement(Locator),
#[serde(rename = "WebDriver:FindElements")]
FindElements(Locator),
#[serde(rename = "WebDriver:FindElement")]
FindElementElement {
element: String,
using: Selector,
value: String,
},
#[serde(rename = "WebDriver:FindElements")]
FindElementElements {
element: String,
using: Selector,
value: String,
},
#[serde(rename = "WebDriver:FindElementFromShadowRoot")]
FindShadowRootElement {
#[serde(rename = "shadowRoot")]
shadow_root: String,
using: Selector,
value: String,
},
#[serde(rename = "WebDriver:FindElementsFromShadowRoot")]
FindShadowRootElements {
#[serde(rename = "shadowRoot")]
shadow_root: String,
using: Selector,
value: String,
},
#[serde(rename = "WebDriver:FullscreenWindow")]
FullscreenWindow,
#[serde(rename = "WebDriver:Navigate")]
Get(Url),
#[serde(rename = "WebDriver:GetActiveElement")]
GetActiveElement,
#[serde(rename = "WebDriver:GetAlertText")]
GetAlertText,
#[serde(rename = "WebDriver:GetComputedLabel")]
GetComputedLabel { id: String },
#[serde(rename = "WebDriver:GetComputedRole")]
GetComputedRole { id: String },
#[serde(rename = "WebDriver:GetCookies")]
GetCookies,
#[serde(rename = "WebDriver:GetElementCSSValue")]
GetCSSValue {
id: String,
#[serde(rename = "propertyName")]
property: String,
},
#[serde(rename = "WebDriver:GetCurrentURL")]
GetCurrentUrl,
#[serde(rename = "WebDriver:GetElementAttribute")]
GetElementAttribute { id: String, name: String },
#[serde(rename = "WebDriver:GetElementProperty")]
GetElementProperty { id: String, name: String },
#[serde(rename = "WebDriver:GetElementRect")]
GetElementRect { id: String },
#[serde(rename = "WebDriver:GetElementTagName")]
GetElementTagName { id: String },
#[serde(rename = "WebDriver:GetElementText")]
GetElementText { id: String },
#[serde(rename = "WebDriver:GetPageSource")]
GetPageSource,
#[serde(rename = "WebDriver:GetShadowRoot")]
GetShadowRoot { id: String },
#[serde(rename = "WebDriver:GetTimeouts")]
GetTimeouts,
#[serde(rename = "WebDriver:GetTitle")]
GetTitle,
#[serde(rename = "WebDriver:GetWindowHandle")]
GetWindowHandle,
#[serde(rename = "WebDriver:GetWindowHandles")]
GetWindowHandles,
#[serde(rename = "WebDriver:GetWindowRect")]
GetWindowRect,
#[serde(rename = "WebDriver:Back")]
GoBack,
#[serde(rename = "WebDriver:Forward")]
GoForward,
#[serde(rename = "WebDriver:IsElementDisplayed")]
IsDisplayed { id: String },
#[serde(rename = "WebDriver:IsElementEnabled")]
IsEnabled { id: String },
#[serde(rename = "WebDriver:IsElementSelected")]
IsSelected { id: String },
#[serde(rename = "WebDriver:MaximizeWindow")]
MaximizeWindow,
#[serde(rename = "WebDriver:MinimizeWindow")]
MinimizeWindow,
#[serde(rename = "WebDriver:NewWindow")]
NewWindow(NewWindow),
#[serde(rename = "WebDriver:Print")]
Print(PrintParameters),
#[serde(rename = "WebDriver:Refresh")]
Refresh,
#[serde(rename = "WebDriver:ReleaseActions")]
ReleaseActions,
#[serde(rename = "WebDriver:SendAlertText")]
SendAlertText(Keys),
#[serde(rename = "WebDriver:SetTimeouts")]
SetTimeouts(Timeouts),
#[serde(rename = "WebDriver:SetWindowRect")]
SetWindowRect(WindowRect),
#[serde(rename = "WebDriver:SwitchToFrame")]
SwitchToFrame(Frame),
#[serde(rename = "WebDriver:SwitchToParentFrame")]
SwitchToParentFrame,
#[serde(rename = "WebDriver:SwitchToWindow")]
SwitchToWindow(Window),
#[serde(rename = "WebDriver:TakeScreenshot")]
TakeElementScreenshot(ScreenshotOptions),
#[serde(rename = "WebDriver:TakeScreenshot")]
TakeFullScreenshot(ScreenshotOptions),
#[serde(rename = "WebDriver:TakeScreenshot")]
TakeScreenshot(ScreenshotOptions),
}
#[cfg(test)]
mod tests {
use super::*;
use crate::common::Date;
use crate::test::{assert_ser, assert_ser_de};
use serde_json::json;
#[test]
fn test_json_screenshot() {
let data = ScreenshotOptions {
id: None,
highlights: vec![],
full: false,
};
let json = json!({"full":false,"highlights":[],"id":null});
assert_ser_de(&data, json);
}
#[test]
fn test_json_selector_css() {
assert_ser_de(&Selector::Css, json!("css selector"));
}
#[test]
fn test_json_selector_link_text() {
assert_ser_de(&Selector::LinkText, json!("link text"));
}
#[test]
fn test_json_selector_partial_link_text() {
assert_ser_de(&Selector::PartialLinkText, json!("partial link text"));
}
#[test]
fn test_json_selector_tag_name() {
assert_ser_de(&Selector::TagName, json!("tag name"));
}
#[test]
fn test_json_selector_xpath() {
assert_ser_de(&Selector::XPath, json!("xpath"));
}
#[test]
fn test_json_selector_invalid() {
assert!(serde_json::from_value::<Selector>(json!("foo")).is_err());
}
#[test]
fn test_json_locator() {
let json = json!({
"using": "partial link text",
"value": "link text",
});
let data = Locator {
using: Selector::PartialLinkText,
value: "link text".into(),
};
assert_ser_de(&data, json);
}
#[test]
fn test_json_keys() {
let data = Keys {
text: "Foo".into(),
value: vec!["F".into(), "o".into(), "o".into()],
};
let json = json!({"text": "Foo", "value": ["F", "o", "o"]});
assert_ser_de(&data, json);
}
#[test]
fn test_json_new_window() {
let data = NewWindow {
type_hint: Some("foo".into()),
};
assert_ser_de(&data, json!({ "type": "foo" }));
}
#[test]
fn test_json_window_rect() {
let data = WindowRect {
x: Some(123),
y: None,
width: None,
height: None,
};
assert_ser_de(&data, json!({"x": 123}));
}
#[test]
fn test_command_with_params() {
let locator = Locator {
using: Selector::Css,
value: "value".into(),
};
let json = json!({"WebDriver:FindElement": {"using": "css selector", "value": "value"}});
assert_ser_de(&Command::FindElement(locator), json);
}
#[test]
fn test_command_with_wrapper_params() {
let cookie = Cookie {
name: "hello".into(),
value: "world".into(),
path: None,
domain: None,
secure: false,
http_only: false,
expiry: Some(Date(1564488092)),
same_site: None,
};
let json = json!({"WebDriver:AddCookie": {"cookie": {"name": "hello", "value": "world", "secure": false, "httpOnly": false, "expiry": 1564488092}}});
assert_ser_de(&Command::AddCookie(cookie), json);
}
#[test]
fn test_empty_commands() {
assert_ser_de(&Command::GetTimeouts, json!("WebDriver:GetTimeouts"));
}
#[test]
fn test_json_command_invalid() {
assert!(serde_json::from_value::<Command>(json!("foo")).is_err());
}
#[test]
fn test_json_delete_cookie_command() {
let json = json!({"WebDriver:DeleteCookie": {"name": "foo"}});
assert_ser_de(&Command::DeleteCookie("foo".into()), json);
}
#[test]
fn test_json_new_window_command() {
let data = NewWindow {
type_hint: Some("foo".into()),
};
let json = json!({"WebDriver:NewWindow": {"type": "foo"}});
assert_ser_de(&Command::NewWindow(data), json);
}
#[test]
fn test_json_new_window_command_with_none_value() {
let data = NewWindow { type_hint: None };
let json = json!({"WebDriver:NewWindow": {}});
assert_ser_de(&Command::NewWindow(data), json);
}
#[test]
fn test_json_command_as_struct() {
assert_ser(
&Command::FindElementElement {
element: "foo".into(),
using: Selector::XPath,
value: "bar".into(),
},
json!({"WebDriver:FindElement": {"element": "foo", "using": "xpath", "value": "bar" }}),
);
}
#[test]
fn test_json_get_computed_label_command() {
assert_ser_de(
&Command::GetComputedLabel { id: "foo".into() },
json!({"WebDriver:GetComputedLabel": {"id": "foo"}}),
);
}
#[test]
fn test_json_get_computed_role_command() {
assert_ser_de(
&Command::GetComputedRole { id: "foo".into() },
json!({"WebDriver:GetComputedRole": {"id": "foo"}}),
);
}
#[test]
fn test_json_get_css_value() {
assert_ser_de(
&Command::GetCSSValue {
id: "foo".into(),
property: "bar".into(),
},
json!({"WebDriver:GetElementCSSValue": {"id": "foo", "propertyName": "bar"}}),
);
}
#[test]
fn test_json_find_shadow_root_element() {
assert_ser_de(
&Command::FindShadowRootElement {
shadow_root: "foo".into(),
using: Selector::Css,
value: "bar".into(),
},
json!({"WebDriver:FindElementFromShadowRoot": {"shadowRoot": "foo", "using": "css selector", "value": "bar"}}),
);
}
#[test]
fn test_json_find_shadow_root_elements() {
assert_ser_de(
&Command::FindShadowRootElements {
shadow_root: "foo".into(),
using: Selector::Css,
value: "bar".into(),
},
json!({"WebDriver:FindElementsFromShadowRoot": {"shadowRoot": "foo", "using": "css selector", "value": "bar"}}),
);
}
}

@ -0,0 +1,533 @@
use crate::capabilities::AndroidOptions;
use mozdevice::{AndroidStorage, Device, Host, UnixPathBuf};
use mozprofile::profile::Profile;
use serde::Serialize;
use serde_yaml::{Mapping, Value};
use std::fmt;
use std::io;
use std::time;
use webdriver::error::{ErrorStatus, WebDriverError};
// TODO: avoid port clashes across GeckoView-vehicles.
// For now, we always use target port 2829, leading to issues like bug 1533704.
const MARIONETTE_TARGET_PORT: u16 = 2829;
const CONFIG_FILE_HEADING: &str = r#"## GeckoView configuration YAML
##
## Auto-generated by geckodriver.
## See https://mozilla.github.io/geckoview/consumer/docs/automation.
"#;
pub type Result<T> = std::result::Result<T, AndroidError>;
#[derive(Debug)]
pub enum AndroidError {
ActivityNotFound(String),
Device(mozdevice::DeviceError),
IO(io::Error),
PackageNotFound(String),
Serde(serde_yaml::Error),
}
impl fmt::Display for AndroidError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
AndroidError::ActivityNotFound(ref package) => {
write!(f, "Activity for package '{}' not found", package)
}
AndroidError::Device(ref message) => message.fmt(f),
AndroidError::IO(ref message) => message.fmt(f),
AndroidError::PackageNotFound(ref package) => {
write!(f, "Package '{}' not found", package)
}
AndroidError::Serde(ref message) => message.fmt(f),
}
}
}
impl From<io::Error> for AndroidError {
fn from(value: io::Error) -> AndroidError {
AndroidError::IO(value)
}
}
impl From<mozdevice::DeviceError> for AndroidError {
fn from(value: mozdevice::DeviceError) -> AndroidError {
AndroidError::Device(value)
}
}
impl From<serde_yaml::Error> for AndroidError {
fn from(value: serde_yaml::Error) -> AndroidError {
AndroidError::Serde(value)
}
}
impl From<AndroidError> for WebDriverError {
fn from(value: AndroidError) -> WebDriverError {
WebDriverError::new(ErrorStatus::UnknownError, value.to_string())
}
}
/// A remote Gecko instance.
///
/// Host refers to the device running `geckodriver`. Target refers to the
/// Android device running Gecko in a GeckoView-based vehicle.
#[derive(Debug)]
pub struct AndroidProcess {
pub device: Device,
pub package: String,
pub activity: String,
}
impl AndroidProcess {
pub fn new(
device: Device,
package: String,
activity: String,
) -> mozdevice::Result<AndroidProcess> {
Ok(AndroidProcess {
device,
package,
activity,
})
}
}
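/// Drives a GeckoView-based application on the target Android device: sets up
/// adb port forwarding, pushes the profile and the GeckoView configuration
/// file, and launches or force-stops the package.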
#[derive(Debug)]
pub struct AndroidHandler {
pub config: UnixPathBuf,
pub options: AndroidOptions,
pub process: AndroidProcess,
pub profile: UnixPathBuf,
pub test_root: UnixPathBuf,
// Port forwarding for Marionette: host => target
pub marionette_host_port: u16,
pub marionette_target_port: u16,
// Port forwarding for WebSocket connections (WebDriver BiDi and CDP)
pub websocket_port: Option<u16>,
}
impl Drop for AndroidHandler {
fn drop(&mut self) {
// Try to clean up various settings
let clear_command = format!("am clear-debug-app {}", self.process.package);
match self
.process
.device
.execute_host_shell_command(&clear_command)
{
Ok(_) => debug!("Disabled reading from configuration file"),
Err(e) => error!("Failed disabling from configuration file: {}", e),
}
match self.process.device.remove(&self.config) {
Ok(_) => debug!("Deleted GeckoView configuration file"),
Err(e) => error!("Failed deleting GeckoView configuration file: {}", e),
}
match self.process.device.remove(&self.test_root) {
Ok(_) => debug!("Deleted test root folder: {}", &self.test_root.display()),
Err(e) => error!("Failed deleting test root folder: {}", e),
}
match self
.process
.device
.kill_forward_port(self.marionette_host_port)
{
Ok(_) => debug!(
"Marionette port forward ({} -> {}) stopped",
&self.marionette_host_port, &self.marionette_target_port
),
Err(e) => error!(
"Marionette port forward ({} -> {}) failed to stop: {}",
&self.marionette_host_port, &self.marionette_target_port, e
),
}
if let Some(port) = self.websocket_port {
match self.process.device.kill_forward_port(port) {
Ok(_) => debug!("WebSocket port forward ({0} -> {0}) stopped", &port),
Err(e) => error!(
"WebSocket port forward ({0} -> {0}) failed to stop: {1}",
&port, e
),
}
}
}
}
impl AndroidHandler {
pub fn new(
options: &AndroidOptions,
marionette_host_port: u16,
websocket_port: Option<u16>,
) -> Result<AndroidHandler> {
// We need to push profile.pathbuf to a safe space on the device.
// Make it per-Android package to avoid clashes and confusion.
// This naming scheme follows GeckoView's configuration file naming scheme,
// see bug 1533385.
let host = Host {
host: None,
port: None,
read_timeout: Some(time::Duration::from_millis(5000)),
write_timeout: Some(time::Duration::from_millis(5000)),
};
let mut device = host.device_or_default(options.device_serial.as_ref(), options.storage)?;
// Set up port forwarding for Marionette.
device.forward_port(marionette_host_port, MARIONETTE_TARGET_PORT)?;
debug!(
"Marionette port forward ({} -> {}) started",
marionette_host_port, MARIONETTE_TARGET_PORT
);
if let Some(port) = websocket_port {
// Set up port forwarding for WebSocket connections (WebDriver BiDi, and CDP).
device.forward_port(port, port)?;
debug!("WebSocket port forward ({} -> {}) started", port, port);
}
let test_root = match device.storage {
AndroidStorage::App => {
device.run_as_package = Some(options.package.to_owned());
let mut buf = UnixPathBuf::from("/data/data");
buf.push(&options.package);
buf.push("test_root");
buf
}
AndroidStorage::Internal => UnixPathBuf::from("/data/local/tmp/test_root"),
AndroidStorage::Sdcard => {
// We need to push the profile to a location on the device that can also
// be read and write by the application, and works for unrooted devices.
// The only location that meets this criteria is under:
// $EXTERNAL_STORAGE/Android/data/%options.package%/files
let response = device.execute_host_shell_command("echo $EXTERNAL_STORAGE")?;
let mut buf = UnixPathBuf::from(response.trim_end_matches('\n'));
buf.push("Android/data");
buf.push(&options.package);
buf.push("files/test_root");
buf
}
};
debug!(
"Connecting: options={:?}, storage={:?}) test_root={}, run_as_package={:?}",
options,
device.storage,
test_root.display(),
device.run_as_package
);
let mut profile = test_root.clone();
profile.push(format!("{}-geckodriver-profile", &options.package));
// Check if the specified package is installed
let response =
device.execute_host_shell_command(&format!("pm list packages {}", &options.package))?;
let mut packages = response
.trim()
.split_terminator('\n')
.filter(|line| line.starts_with("package:"))
.map(|line| line.rsplit(':').next().expect("Package name found"));
if !packages.any(|x| x == options.package.as_str()) {
return Err(AndroidError::PackageNotFound(options.package.clone()));
}
let config = UnixPathBuf::from(format!(
"/data/local/tmp/{}-geckoview-config.yaml",
&options.package
));
// If activity hasn't been specified default to the main activity of the package
let activity = match options.activity {
Some(ref activity) => activity.clone(),
None => {
let response = device.execute_host_shell_command(&format!(
"cmd package resolve-activity --brief {}",
&options.package
))?;
let activities = response
.split_terminator('\n')
.filter(|line| line.starts_with(&options.package))
.map(|line| line.rsplit('/').next().unwrap())
.collect::<Vec<&str>>();
if activities.is_empty() {
return Err(AndroidError::ActivityNotFound(options.package.clone()));
}
activities[0].to_owned()
}
};
let process = AndroidProcess::new(device, options.package.clone(), activity)?;
Ok(AndroidHandler {
config,
process,
profile,
test_root,
marionette_host_port,
marionette_target_port: MARIONETTE_TARGET_PORT,
options: options.clone(),
websocket_port,
})
}
pub fn generate_config_file<I, K, V>(
&self,
args: Option<Vec<String>>,
envs: I,
) -> Result<String>
where
I: IntoIterator<Item = (K, V)>,
K: ToString,
V: ToString,
{
// To configure GeckoView, we use the automation techniques documented at
// https://mozilla.github.io/geckoview/consumer/docs/automation.
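        // The generated file looks roughly like this (illustrative paths):
        //
        //   args:
        //     - --marionette
        //     - --profile
        //     - /data/local/tmp/test_root/org.mozilla.geckoview_example-geckodriver-profile
        //   env:
        //     MOZ_CRASHREPORTER: "1"
        //     MOZ_CRASHREPORTER_NO_REPORT: "1"
        //     MOZ_CRASHREPORTER_SHUTDOWN: "1"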
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
pub struct Config {
pub env: Mapping,
pub args: Vec<String>,
}
let mut config = Config {
args: vec![
"--marionette".into(),
"--profile".into(),
self.profile.display().to_string(),
],
env: Mapping::new(),
};
config.args.append(&mut args.unwrap_or_default());
for (key, value) in envs {
config.env.insert(
Value::String(key.to_string()),
Value::String(value.to_string()),
);
}
config.env.insert(
Value::String("MOZ_CRASHREPORTER".to_owned()),
Value::String("1".to_owned()),
);
config.env.insert(
Value::String("MOZ_CRASHREPORTER_NO_REPORT".to_owned()),
Value::String("1".to_owned()),
);
config.env.insert(
Value::String("MOZ_CRASHREPORTER_SHUTDOWN".to_owned()),
Value::String("1".to_owned()),
);
let mut contents: Vec<String> = vec![CONFIG_FILE_HEADING.to_owned()];
contents.push(serde_yaml::to_string(&config)?);
Ok(contents.concat())
}
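    /// Prepares the target device for launch: clears existing app data, grants
    /// the external-storage permissions, creates the test root, pushes the
    /// profile and the generated configuration file, and marks the package
    /// debuggable so GeckoView reads that file.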
pub fn prepare<I, K, V>(
&self,
profile: &Profile,
args: Option<Vec<String>>,
env: I,
) -> Result<()>
where
I: IntoIterator<Item = (K, V)>,
K: ToString,
V: ToString,
{
self.process.device.clear_app_data(&self.process.package)?;
// These permissions, at least, are required to read profiles in /mnt/sdcard.
for perm in &["READ_EXTERNAL_STORAGE", "WRITE_EXTERNAL_STORAGE"] {
self.process.device.execute_host_shell_command(&format!(
"pm grant {} android.permission.{}",
&self.process.package, perm
))?;
}
// Make sure to create the test root.
self.process.device.create_dir(&self.test_root)?;
self.process.device.chmod(&self.test_root, "777", true)?;
// Replace the profile
self.process.device.remove(&self.profile)?;
self.process
.device
.push_dir(&profile.path, &self.profile, 0o777)?;
let contents = self.generate_config_file(args, env)?;
debug!("Content of generated GeckoView config file:\n{}", contents);
let reader = &mut io::BufReader::new(contents.as_bytes());
debug!(
"Pushing GeckoView configuration file to {}",
self.config.display()
);
self.process.device.push(reader, &self.config, 0o777)?;
// Tell GeckoView to read configuration even when `android:debuggable="false"`.
self.process.device.execute_host_shell_command(&format!(
"am set-debug-app --persistent {}",
self.process.package
))?;
Ok(())
}
pub fn launch(&self) -> Result<()> {
// TODO: Remove the usage of intent arguments once Fennec is no longer
// supported. Packages which are using GeckoView always read the arguments
// via the YAML configuration file.
let mut intent_arguments = self
.options
.intent_arguments
.clone()
.unwrap_or_else(|| Vec::with_capacity(3));
intent_arguments.push("--es".to_owned());
intent_arguments.push("args".to_owned());
intent_arguments.push(format!("--marionette --profile {}", self.profile.display()));
debug!(
"Launching {}/{}",
self.process.package, self.process.activity
);
self.process
.device
.launch(
&self.process.package,
&self.process.activity,
&intent_arguments,
)
.map_err(|e| {
let message = format!(
"Could not launch Android {}/{}: {}",
self.process.package, self.process.activity, e
);
mozdevice::DeviceError::Adb(message)
})?;
Ok(())
}
pub fn force_stop(&self) -> Result<()> {
debug!(
"Force stopping the Android package: {}",
&self.process.package
);
self.process.device.force_stop(&self.process.package)?;
Ok(())
}
}
#[cfg(test)]
mod test {
// To successfully run those tests the geckoview_example package needs to
// be installed on the device or emulator. After setting up the build
// environment (https://mzl.la/3muLv5M), the following mach commands have to
// be executed:
//
// $ ./mach build && ./mach install
//
// Currently the mozdevice API is not safe for multiple requests at the same
// time. It is recommended to run each of the unit tests on its own. Also adb
// specific tests cannot be run in CI yet. To check those locally, also run
// the ignored tests.
//
// Use the following command to accomplish that:
//
// $ cargo test -- --ignored --test-threads=1
use crate::android::AndroidHandler;
use crate::capabilities::AndroidOptions;
use mozdevice::{AndroidStorage, AndroidStorageInput, UnixPathBuf};
fn run_handler_storage_test(package: &str, storage: AndroidStorageInput) {
let options = AndroidOptions::new(package.to_owned(), storage);
let handler = AndroidHandler::new(&options, 4242, None).expect("has valid Android handler");
assert_eq!(handler.options, options);
assert_eq!(handler.process.package, package);
let expected_config_path = UnixPathBuf::from(format!(
"/data/local/tmp/{}-geckoview-config.yaml",
&package
));
assert_eq!(handler.config, expected_config_path);
if handler.process.device.storage == AndroidStorage::App {
assert_eq!(
handler.process.device.run_as_package,
Some(package.to_owned())
);
} else {
assert_eq!(handler.process.device.run_as_package, None);
}
let test_root = match handler.process.device.storage {
AndroidStorage::App => {
let mut buf = UnixPathBuf::from("/data/data");
buf.push(&package);
buf.push("test_root");
buf
}
AndroidStorage::Internal => UnixPathBuf::from("/data/local/tmp/test_root"),
AndroidStorage::Sdcard => {
let response = handler
.process
.device
.execute_host_shell_command("echo $EXTERNAL_STORAGE")
.unwrap();
let mut buf = UnixPathBuf::from(response.trim_end_matches('\n'));
buf.push("Android/data/");
buf.push(&package);
buf.push("files/test_root");
buf
}
};
assert_eq!(handler.test_root, test_root);
let mut profile = test_root;
profile.push(format!("{}-geckodriver-profile", &package));
assert_eq!(handler.profile, profile);
}
#[test]
#[ignore]
fn android_handler_storage_as_app() {
let package = "org.mozilla.geckoview_example";
run_handler_storage_test(package, AndroidStorageInput::App);
}
#[test]
#[ignore]
fn android_handler_storage_as_auto() {
let package = "org.mozilla.geckoview_example";
run_handler_storage_test(package, AndroidStorageInput::Auto);
}
#[test]
#[ignore]
fn android_handler_storage_as_internal() {
let package = "org.mozilla.geckoview_example";
run_handler_storage_test(package, AndroidStorageInput::Internal);
}
#[test]
#[ignore]
fn android_handler_storage_as_sdcard() {
let package = "org.mozilla.geckoview_example";
run_handler_storage_test(package, AndroidStorageInput::Sdcard);
}
}

@ -0,0 +1,554 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::android::AndroidHandler;
use crate::capabilities::{FirefoxOptions, ProfileType};
use crate::logging;
use crate::prefs;
use mozprofile::preferences::Pref;
use mozprofile::profile::{PrefFile, Profile};
use mozrunner::runner::{FirefoxProcess, FirefoxRunner, Runner, RunnerProcess};
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::time;
use webdriver::error::{ErrorStatus, WebDriverError, WebDriverResult};
/// A running Gecko instance.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub(crate) enum Browser {
Local(LocalBrowser),
Remote(RemoteBrowser),
/// An existing browser instance not controlled by GeckoDriver
Existing(u16),
}
impl Browser {
pub(crate) fn close(self, wait_for_shutdown: bool) -> WebDriverResult<()> {
match self {
Browser::Local(x) => x.close(wait_for_shutdown),
Browser::Remote(x) => x.close(),
Browser::Existing(_) => Ok(()),
}
}
pub(crate) fn marionette_port(&mut self) -> WebDriverResult<Option<u16>> {
match self {
Browser::Local(x) => x.marionette_port(),
Browser::Remote(x) => x.marionette_port(),
Browser::Existing(x) => Ok(Some(*x)),
}
}
pub(crate) fn update_marionette_port(&mut self, port: u16) {
match self {
Browser::Local(x) => x.update_marionette_port(port),
Browser::Remote(x) => x.update_marionette_port(port),
Browser::Existing(x) => {
if port != *x {
error!(
"Cannot re-assign Marionette port when connected to an existing browser"
);
}
}
}
}
}
#[derive(Debug)]
/// A local Firefox process, running on this (host) device.
pub(crate) struct LocalBrowser {
marionette_port: u16,
prefs_backup: Option<PrefsBackup>,
process: FirefoxProcess,
profile_path: Option<PathBuf>,
}
impl LocalBrowser {
pub(crate) fn new(
options: FirefoxOptions,
marionette_port: u16,
jsdebugger: bool,
profile_root: Option<&Path>,
) -> WebDriverResult<LocalBrowser> {
let binary = options.binary.ok_or_else(|| {
WebDriverError::new(
ErrorStatus::SessionNotCreated,
"Expected browser binary location, but unable to find \
binary in default location, no \
'moz:firefoxOptions.binary' capability provided, and \
no binary flag set on the command line",
)
})?;
let is_custom_profile = matches!(options.profile, ProfileType::Path(_));
let mut profile = match options.profile {
ProfileType::Named => None,
ProfileType::Path(x) => Some(x),
ProfileType::Temporary => Some(Profile::new(profile_root)?),
};
let (profile_path, prefs_backup) = if let Some(ref mut profile) = profile {
let profile_path = profile.path.clone();
let prefs_backup = set_prefs(
marionette_port,
profile,
is_custom_profile,
options.prefs,
jsdebugger,
)
.map_err(|e| {
WebDriverError::new(
ErrorStatus::SessionNotCreated,
format!("Failed to set preferences: {}", e),
)
})?;
(Some(profile_path), prefs_backup)
} else {
warn!("Unable to set geckodriver prefs when using a named profile");
(None, None)
};
let mut runner = FirefoxRunner::new(&binary, profile);
runner.arg("--marionette");
if jsdebugger {
runner.arg("--jsdebugger");
}
if let Some(args) = options.args.as_ref() {
runner.args(args);
}
// https://developer.mozilla.org/docs/Environment_variables_affecting_crash_reporting
runner
.env("MOZ_CRASHREPORTER", "1")
.env("MOZ_CRASHREPORTER_NO_REPORT", "1")
.env("MOZ_CRASHREPORTER_SHUTDOWN", "1");
let process = match runner.start() {
Ok(process) => process,
Err(e) => {
if let Some(backup) = prefs_backup {
backup.restore();
}
return Err(WebDriverError::new(
ErrorStatus::SessionNotCreated,
format!("Failed to start browser {}: {}", binary.display(), e),
));
}
};
Ok(LocalBrowser {
marionette_port,
prefs_backup,
process,
profile_path,
})
}
fn close(mut self, wait_for_shutdown: bool) -> WebDriverResult<()> {
if wait_for_shutdown {
// TODO(https://bugzil.la/1443922):
            // Use toolkit.asyncshutdown.crash_timeout pref
let duration = time::Duration::from_secs(70);
match self.process.wait(duration) {
Ok(x) => debug!("Browser process stopped: {}", x),
Err(e) => error!("Failed to stop browser process: {}", e),
}
}
self.process.kill()?;
        // If the browser failed to stop, restoring the prefs probably won't help, but try anyway.
if let Some(prefs_backup) = self.prefs_backup {
prefs_backup.restore();
};
Ok(())
}
fn marionette_port(&mut self) -> WebDriverResult<Option<u16>> {
if self.marionette_port != 0 {
return Ok(Some(self.marionette_port));
}
if let Some(profile_path) = self.profile_path.as_ref() {
return Ok(read_marionette_port(profile_path));
}
// This should be impossible, but it isn't enforced
Err(WebDriverError::new(
ErrorStatus::SessionNotCreated,
"Port not known when using named profile",
))
}
fn update_marionette_port(&mut self, port: u16) {
self.marionette_port = port;
}
pub(crate) fn check_status(&mut self) -> Option<String> {
match self.process.try_wait() {
Ok(Some(status)) => Some(
status
.code()
.map(|c| c.to_string())
.unwrap_or_else(|| "signal".into()),
),
Ok(None) => None,
Err(_) => Some("{unknown}".into()),
}
}
}
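/// Reads the port Marionette is listening on from the `MarionetteActivePort`
/// file in the profile. Returns `None` if the file is missing or does not
/// contain a valid `u16`.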
fn read_marionette_port(profile_path: &Path) -> Option<u16> {
let port_file = profile_path.join("MarionetteActivePort");
let mut port_str = String::with_capacity(6);
let mut file = match fs::File::open(&port_file) {
Ok(file) => file,
Err(_) => {
trace!("Failed to open {}", &port_file.to_string_lossy());
return None;
}
};
if let Err(e) = file.read_to_string(&mut port_str) {
trace!("Failed to read {}: {}", &port_file.to_string_lossy(), e);
return None;
};
println!("Read port: {}", port_str);
let port = port_str.parse::<u16>().ok();
if port.is_none() {
warn!("Failed fo convert {} to u16", &port_str);
}
port
}
#[derive(Debug)]
/// A remote instance, running on a (target) Android device.
pub(crate) struct RemoteBrowser {
handler: AndroidHandler,
marionette_port: u16,
prefs_backup: Option<PrefsBackup>,
}
impl RemoteBrowser {
pub(crate) fn new(
options: FirefoxOptions,
marionette_port: u16,
websocket_port: Option<u16>,
profile_root: Option<&Path>,
) -> WebDriverResult<RemoteBrowser> {
let android_options = options.android.unwrap();
let handler = AndroidHandler::new(&android_options, marionette_port, websocket_port)?;
// Profile management.
let (mut profile, is_custom_profile) = match options.profile {
ProfileType::Named => {
return Err(WebDriverError::new(
ErrorStatus::SessionNotCreated,
"Cannot use a named profile on Android",
));
}
ProfileType::Path(x) => (x, true),
ProfileType::Temporary => (Profile::new(profile_root)?, false),
};
let prefs_backup = set_prefs(
handler.marionette_target_port,
&mut profile,
is_custom_profile,
options.prefs,
false,
)
.map_err(|e| {
WebDriverError::new(
ErrorStatus::SessionNotCreated,
format!("Failed to set preferences: {}", e),
)
})?;
handler.prepare(&profile, options.args, options.env.unwrap_or_default())?;
handler.launch()?;
Ok(RemoteBrowser {
handler,
marionette_port,
prefs_backup,
})
}
fn close(self) -> WebDriverResult<()> {
self.handler.force_stop()?;
        // If the browser failed to stop, restoring the prefs probably won't help, but try anyway.
if let Some(prefs_backup) = self.prefs_backup {
prefs_backup.restore();
};
Ok(())
}
fn marionette_port(&mut self) -> WebDriverResult<Option<u16>> {
Ok(Some(self.marionette_port))
}
fn update_marionette_port(&mut self, port: u16) {
self.marionette_port = port;
}
}
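/// Writes geckodriver's required preferences into the profile: the defaults
/// from `prefs::DEFAULT` (unless a custom profile already sets them), any
/// extra prefs from the capabilities, the Marionette port, and the remote log
/// level. For a custom profile an on-disk backup of the original prefs file is
/// returned so it can be restored later.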
fn set_prefs(
port: u16,
profile: &mut Profile,
custom_profile: bool,
extra_prefs: Vec<(String, Pref)>,
js_debugger: bool,
) -> WebDriverResult<Option<PrefsBackup>> {
let prefs = profile.user_prefs().map_err(|_| {
WebDriverError::new(
ErrorStatus::UnknownError,
"Unable to read profile preferences file",
)
})?;
let backup_prefs = if custom_profile && prefs.path.exists() {
Some(PrefsBackup::new(prefs)?)
} else {
None
};
for &(name, ref value) in prefs::DEFAULT.iter() {
if !custom_profile || !prefs.contains_key(name) {
prefs.insert(name.to_string(), (*value).clone());
}
}
prefs.insert_slice(&extra_prefs[..]);
if js_debugger {
prefs.insert("devtools.browsertoolbox.panel", Pref::new("jsdebugger"));
prefs.insert("devtools.debugger.remote-enabled", Pref::new(true));
prefs.insert("devtools.chrome.enabled", Pref::new(true));
prefs.insert("devtools.debugger.prompt-connection", Pref::new(false));
}
prefs.insert("marionette.port", Pref::new(port));
prefs.insert("remote.log.level", logging::max_level().into());
prefs.write().map_err(|e| {
WebDriverError::new(
ErrorStatus::UnknownError,
format!("Unable to write Firefox profile: {}", e),
)
})?;
Ok(backup_prefs)
}
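/// A copy of a custom profile's original prefs file, taken before geckodriver
/// modifies it and restored when the browser is closed.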
#[derive(Debug)]
struct PrefsBackup {
orig_path: PathBuf,
backup_path: PathBuf,
}
impl PrefsBackup {
fn new(prefs: &PrefFile) -> WebDriverResult<PrefsBackup> {
let mut prefs_backup_path = prefs.path.clone();
let mut counter = 0;
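        // Do-while idiom: the condition block computes a candidate backup file
        // name, and the loop keeps bumping the counter until that name is unused.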
while {
let ext = if counter > 0 {
format!("geckodriver_backup_{}", counter)
} else {
"geckodriver_backup".to_string()
};
prefs_backup_path.set_extension(ext);
prefs_backup_path.exists()
} {
counter += 1
}
debug!("Backing up prefs to {:?}", prefs_backup_path);
fs::copy(&prefs.path, &prefs_backup_path)?;
Ok(PrefsBackup {
orig_path: prefs.path.clone(),
backup_path: prefs_backup_path,
})
}
fn restore(self) {
if self.backup_path.exists() {
let _ = fs::rename(self.backup_path, self.orig_path);
}
}
}
#[cfg(test)]
mod tests {
use super::set_prefs;
use crate::browser::read_marionette_port;
use crate::capabilities::{FirefoxOptions, ProfileType};
use mozprofile::preferences::{Pref, PrefValue};
use mozprofile::profile::Profile;
use serde_json::{Map, Value};
use std::fs::File;
use std::io::{Read, Write};
use std::path::Path;
use tempfile::tempdir;
fn example_profile() -> Value {
let mut profile_data = Vec::with_capacity(1024);
let mut profile = File::open("src/tests/profile.zip").unwrap();
profile.read_to_end(&mut profile_data).unwrap();
Value::String(base64::encode(&profile_data))
}
// This is not a pretty test, mostly due to the nature of
// mozprofile's and MarionetteHandler's APIs, but we have had
// several regressions related to remote.log.level.
#[test]
fn test_remote_log_level() {
let mut profile = Profile::new(None).unwrap();
set_prefs(2828, &mut profile, false, vec![], false).ok();
let user_prefs = profile.user_prefs().unwrap();
let pref = user_prefs.get("remote.log.level").unwrap();
let value = match pref.value {
PrefValue::String(ref s) => s,
_ => panic!(),
};
for (i, ch) in value.chars().enumerate() {
if i == 0 {
assert!(ch.is_uppercase());
} else {
assert!(ch.is_lowercase());
}
}
}
#[test]
fn test_prefs() {
let marionette_settings = Default::default();
let encoded_profile = example_profile();
let mut prefs: Map<String, Value> = Map::new();
prefs.insert(
"browser.display.background_color".into(),
Value::String("#00ff00".into()),
);
let mut firefox_opts = Map::new();
firefox_opts.insert("profile".into(), encoded_profile);
firefox_opts.insert("prefs".into(), Value::Object(prefs));
let mut caps = Map::new();
caps.insert("moz:firefoxOptions".into(), Value::Object(firefox_opts));
let opts = FirefoxOptions::from_capabilities(None, &marionette_settings, &mut caps)
.expect("Valid profile and prefs");
let mut profile = match opts.profile {
ProfileType::Path(profile) => profile,
_ => panic!("Expected ProfileType::Path"),
};
set_prefs(2828, &mut profile, true, opts.prefs, false).expect("set preferences");
let prefs_set = profile.user_prefs().expect("valid user preferences");
println!("{:#?}", prefs_set.prefs);
assert_eq!(
prefs_set.get("startup.homepage_welcome_url"),
Some(&Pref::new("data:text/html,PASS"))
);
assert_eq!(
prefs_set.get("browser.display.background_color"),
Some(&Pref::new("#00ff00"))
);
assert_eq!(prefs_set.get("marionette.port"), Some(&Pref::new(2828)));
}
#[test]
fn test_pref_backup() {
let mut profile = Profile::new(None).unwrap();
// Create some prefs in the profile
let initial_prefs = profile.user_prefs().unwrap();
initial_prefs.insert("geckodriver.example", Pref::new("example"));
initial_prefs.write().unwrap();
let prefs_path = initial_prefs.path.clone();
let mut conflicting_backup_path = initial_prefs.path.clone();
conflicting_backup_path.set_extension("geckodriver_backup");
println!("{:?}", conflicting_backup_path);
let mut file = File::create(&conflicting_backup_path).unwrap();
file.write_all(b"test").unwrap();
assert!(conflicting_backup_path.exists());
let mut initial_prefs_data = String::new();
File::open(&prefs_path)
.expect("Initial prefs exist")
.read_to_string(&mut initial_prefs_data)
.unwrap();
let backup = set_prefs(2828, &mut profile, true, vec![], false)
.unwrap()
.unwrap();
let user_prefs = profile.user_prefs().unwrap();
assert!(user_prefs.path.exists());
let mut backup_path = user_prefs.path.clone();
backup_path.set_extension("geckodriver_backup_1");
assert!(backup_path.exists());
// Ensure the actual prefs contain both the existing ones and the ones we added
let pref = user_prefs.get("marionette.port").unwrap();
assert_eq!(pref.value, PrefValue::Int(2828));
let pref = user_prefs.get("geckodriver.example").unwrap();
assert_eq!(pref.value, PrefValue::String("example".into()));
// Ensure the backup prefs don't contain the new settings
let mut backup_data = String::new();
File::open(&backup_path)
.expect("Backup prefs exist")
.read_to_string(&mut backup_data)
.unwrap();
assert_eq!(backup_data, initial_prefs_data);
backup.restore();
assert!(!backup_path.exists());
let mut final_prefs_data = String::new();
File::open(&prefs_path)
.expect("Initial prefs exist")
.read_to_string(&mut final_prefs_data)
.unwrap();
assert_eq!(final_prefs_data, initial_prefs_data);
}
#[test]
fn test_local_read_marionette_port() {
fn create_port_file(profile_path: &Path, data: &[u8]) {
let port_path = profile_path.join("MarionetteActivePort");
let mut file = File::create(&port_path).unwrap();
file.write_all(data).unwrap();
}
let profile_dir = tempdir().unwrap();
let profile_path = profile_dir.path();
assert_eq!(read_marionette_port(profile_path), None);
assert_eq!(read_marionette_port(profile_path), None);
create_port_file(profile_path, b"");
assert_eq!(read_marionette_port(profile_path), None);
create_port_file(profile_path, b"1234");
assert_eq!(read_marionette_port(profile_path), Some(1234));
create_port_file(profile_path, b"1234abc");
assert_eq!(read_marionette_port(profile_path), None);
}
}

@ -0,0 +1,47 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use serde_json::Value;
use std::fmt;
include!(concat!(env!("OUT_DIR"), "/build-info.rs"));
pub struct BuildInfo;
impl BuildInfo {
pub fn version() -> &'static str {
crate_version!()
}
pub fn hash() -> Option<&'static str> {
COMMIT_HASH
}
pub fn date() -> Option<&'static str> {
COMMIT_DATE
}
}
impl fmt::Display for BuildInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", BuildInfo::version())?;
match (BuildInfo::hash(), BuildInfo::date()) {
(Some(hash), Some(date)) => write!(f, " ({} {})", hash, date)?,
(Some(hash), None) => write!(f, " ({})", hash)?,
_ => {}
}
Ok(())
}
}
impl From<BuildInfo> for Value {
fn from(_: BuildInfo) -> Value {
Value::String(BuildInfo::version().to_string())
}
}
/// Returns build-time information about geckodriver.
pub fn build_info() -> BuildInfo {
BuildInfo {}
}

File diff suppressed because it is too large

@ -0,0 +1,339 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::logging;
use hyper::Method;
use serde::de::{self, Deserialize, Deserializer};
use serde_json::{self, Value};
use std::env;
use std::fs::File;
use std::io::prelude::*;
use uuid::Uuid;
use webdriver::command::{WebDriverCommand, WebDriverExtensionCommand};
use webdriver::error::WebDriverResult;
use webdriver::httpapi::WebDriverExtensionRoute;
use webdriver::Parameters;
pub fn extension_routes() -> Vec<(Method, &'static str, GeckoExtensionRoute)> {
vec![
(
Method::GET,
"/session/{sessionId}/moz/context",
GeckoExtensionRoute::GetContext,
),
(
Method::POST,
"/session/{sessionId}/moz/context",
GeckoExtensionRoute::SetContext,
),
(
Method::POST,
"/session/{sessionId}/moz/addon/install",
GeckoExtensionRoute::InstallAddon,
),
(
Method::POST,
"/session/{sessionId}/moz/addon/uninstall",
GeckoExtensionRoute::UninstallAddon,
),
(
Method::GET,
"/session/{sessionId}/moz/screenshot/full",
GeckoExtensionRoute::TakeFullScreenshot,
),
]
}
#[derive(Clone, PartialEq, Eq)]
pub enum GeckoExtensionRoute {
GetContext,
SetContext,
InstallAddon,
UninstallAddon,
TakeFullScreenshot,
}
impl WebDriverExtensionRoute for GeckoExtensionRoute {
type Command = GeckoExtensionCommand;
fn command(
&self,
_params: &Parameters,
body_data: &Value,
) -> WebDriverResult<WebDriverCommand<GeckoExtensionCommand>> {
use self::GeckoExtensionRoute::*;
let command = match *self {
GetContext => GeckoExtensionCommand::GetContext,
SetContext => {
GeckoExtensionCommand::SetContext(serde_json::from_value(body_data.clone())?)
}
InstallAddon => {
GeckoExtensionCommand::InstallAddon(serde_json::from_value(body_data.clone())?)
}
UninstallAddon => {
GeckoExtensionCommand::UninstallAddon(serde_json::from_value(body_data.clone())?)
}
TakeFullScreenshot => GeckoExtensionCommand::TakeFullScreenshot,
};
Ok(WebDriverCommand::Extension(command))
}
}
#[derive(Clone)]
pub enum GeckoExtensionCommand {
GetContext,
SetContext(GeckoContextParameters),
InstallAddon(AddonInstallParameters),
UninstallAddon(AddonUninstallParameters),
TakeFullScreenshot,
}
impl WebDriverExtensionCommand for GeckoExtensionCommand {
fn parameters_json(&self) -> Option<Value> {
use self::GeckoExtensionCommand::*;
match self {
GetContext => None,
InstallAddon(x) => Some(serde_json::to_value(x).unwrap()),
SetContext(x) => Some(serde_json::to_value(x).unwrap()),
UninstallAddon(x) => Some(serde_json::to_value(x).unwrap()),
TakeFullScreenshot => None,
}
}
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub struct AddonInstallParameters {
pub path: String,
pub temporary: Option<bool>,
}
impl<'de> Deserialize<'de> for AddonInstallParameters {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct Base64 {
addon: String,
temporary: Option<bool>,
}
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct Path {
path: String,
temporary: Option<bool>,
}
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum Helper {
Base64(Base64),
Path(Path),
}
let params = match Helper::deserialize(deserializer)? {
Helper::Path(ref mut data) => AddonInstallParameters {
path: data.path.clone(),
temporary: data.temporary,
},
Helper::Base64(ref mut data) => {
let content = base64::decode(&data.addon).map_err(de::Error::custom)?;
let path = env::temp_dir()
.as_path()
.join(format!("addon-{}.xpi", Uuid::new_v4()));
let mut xpi_file = File::create(&path).map_err(de::Error::custom)?;
xpi_file
.write(content.as_slice())
.map_err(de::Error::custom)?;
let path = match path.to_str() {
Some(path) => path.to_string(),
None => return Err(de::Error::custom("could not write addon to file")),
};
AddonInstallParameters {
path,
temporary: data.temporary,
}
}
};
Ok(params)
}
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct AddonUninstallParameters {
pub id: String,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum GeckoContext {
Content,
Chrome,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct GeckoContextParameters {
pub context: GeckoContext,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct XblLocatorParameters {
pub name: String,
pub value: String,
}
#[derive(Default, Debug, PartialEq, Eq)]
pub struct LogOptions {
pub level: Option<logging::Level>,
}
#[cfg(test)]
mod tests {
use serde_json::json;
use super::*;
use crate::test::assert_de;
#[test]
fn test_json_addon_install_parameters_invalid() {
assert!(serde_json::from_str::<AddonInstallParameters>("").is_err());
assert!(serde_json::from_value::<AddonInstallParameters>(json!(null)).is_err());
assert!(serde_json::from_value::<AddonInstallParameters>(json!({})).is_err());
}
#[test]
fn test_json_addon_install_parameters_with_path_and_temporary() {
let params = AddonInstallParameters {
path: "/path/to.xpi".to_string(),
temporary: Some(true),
};
assert_de(&params, json!({"path": "/path/to.xpi", "temporary": true}));
}
#[test]
fn test_json_addon_install_parameters_with_path() {
let params = AddonInstallParameters {
path: "/path/to.xpi".to_string(),
temporary: None,
};
assert_de(&params, json!({"path": "/path/to.xpi"}));
}
#[test]
fn test_json_addon_install_parameters_with_path_invalid_type() {
let json = json!({"path": true, "temporary": true});
assert!(serde_json::from_value::<AddonInstallParameters>(json).is_err());
}
#[test]
fn test_json_addon_install_parameters_with_path_and_temporary_invalid_type() {
let json = json!({"path": "/path/to.xpi", "temporary": "foo"});
assert!(serde_json::from_value::<AddonInstallParameters>(json).is_err());
}
#[test]
fn test_json_addon_install_parameters_with_addon() {
let json = json!({"addon": "aGVsbG8=", "temporary": true});
let data = serde_json::from_value::<AddonInstallParameters>(json).unwrap();
assert_eq!(data.temporary, Some(true));
let mut file = File::open(data.path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
assert_eq!(contents, "hello");
}
#[test]
fn test_json_addon_install_parameters_with_addon_only() {
let json = json!({"addon": "aGVsbG8="});
let data = serde_json::from_value::<AddonInstallParameters>(json).unwrap();
assert_eq!(data.temporary, None);
let mut file = File::open(data.path).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
assert_eq!(contents, "hello");
}
#[test]
fn test_json_addon_install_parameters_with_addon_invalid_type() {
let json = json!({"addon": true, "temporary": true});
assert!(serde_json::from_value::<AddonInstallParameters>(json).is_err());
}
#[test]
fn test_json_addon_install_parameters_with_addon_and_temporary_invalid_type() {
let json = json!({"addon": "aGVsbG8=", "temporary": "foo"});
assert!(serde_json::from_value::<AddonInstallParameters>(json).is_err());
}
#[test]
fn test_json_install_parameters_with_temporary_only() {
let json = json!({"temporary": true});
assert!(serde_json::from_value::<AddonInstallParameters>(json).is_err());
}
#[test]
fn test_json_addon_install_parameters_with_both_path_and_addon() {
let json = json!({
"path": "/path/to.xpi",
"addon": "aGVsbG8=",
"temporary": true,
});
assert!(serde_json::from_value::<AddonInstallParameters>(json).is_err());
}
#[test]
fn test_json_addon_uninstall_parameters_invalid() {
assert!(serde_json::from_str::<AddonUninstallParameters>("").is_err());
assert!(serde_json::from_value::<AddonUninstallParameters>(json!(null)).is_err());
assert!(serde_json::from_value::<AddonUninstallParameters>(json!({})).is_err());
}
#[test]
fn test_json_addon_uninstall_parameters() {
let params = AddonUninstallParameters {
id: "foo".to_string(),
};
assert_de(&params, json!({"id": "foo"}));
}
#[test]
fn test_json_addon_uninstall_parameters_id_invalid_type() {
let json = json!({"id": true});
assert!(serde_json::from_value::<AddonUninstallParameters>(json).is_err());
}
#[test]
fn test_json_gecko_context_parameters_content() {
let params = GeckoContextParameters {
context: GeckoContext::Content,
};
assert_de(&params, json!({"context": "content"}));
}
#[test]
fn test_json_gecko_context_parameters_chrome() {
let params = GeckoContextParameters {
context: GeckoContext::Chrome,
};
assert_de(&params, json!({"context": "chrome"}));
}
#[test]
fn test_json_gecko_context_parameters_context_invalid() {
type P = GeckoContextParameters;
assert!(serde_json::from_value::<P>(json!({})).is_err());
assert!(serde_json::from_value::<P>(json!({ "context": null })).is_err());
assert!(serde_json::from_value::<P>(json!({"context": "foo"})).is_err());
}
}
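
The hand-written Deserialize impl above leans on serde's untagged-enum pattern: the body is tried against each helper struct in turn, and deny_unknown_fields guarantees that a payload mixing the "path" and "addon" forms matches neither. A minimal sketch of that pattern in isolation, assuming serde (with the derive feature) and serde_json; the helper names simply mirror the ones above.

use serde::Deserialize;
use serde_json::json;

#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct Base64 {
    addon: String,
    temporary: Option<bool>,
}

#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct Path {
    path: String,
    temporary: Option<bool>,
}

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum Helper {
    Base64(Base64),
    Path(Path),
}

fn main() {
    // Only "path": matches the Path variant.
    let by_path: Helper = serde_json::from_value(json!({"path": "/path/to.xpi"})).unwrap();
    // Only "addon" (base64): matches the Base64 variant.
    let by_addon: Helper =
        serde_json::from_value(json!({"addon": "aGVsbG8=", "temporary": true})).unwrap();
    println!("{:?}\n{:?}", by_path, by_addon);

    // Both fields at once: each struct rejects the unknown field, so
    // neither variant matches and deserialization fails.
    let both = serde_json::from_value::<Helper>(json!({"path": "/p.xpi", "addon": "aGVsbG8="}));
    assert!(both.is_err());
}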

@ -0,0 +1,403 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Gecko-esque logger implementation for the [`log`] crate.
//!
//! The [`log`] crate provides a single logging API that abstracts over the
//! actual logging implementation. This module uses the logging API
//! to provide a log implementation that shares many aesthetical traits with
//! [Log.sys.mjs] from Gecko.
//!
//! Using the [`error!`], [`warn!`], [`info!`], [`debug!`], and
//! [`trace!`] macros from `log` will output a timestamp field, followed by the
//! log target, the log level, and then the message. The fields are separated by a tab
//! character, making the output suitable for further text processing with
//! `awk(1)`.
//!
//! This module shares the same API as `log`, except it provides additional
//! entry functions [`init`] and [`init_with_level`] and additional log levels
//! `Level::Fatal` and `Level::Config`. Converting these into the
//! [`log::Level`] is lossy so that `Level::Fatal` becomes `log::Level::Error`
//! and `Level::Config` becomes `log::Level::Debug`.
//!
//! [`log`]: https://docs.rs/log/newest/log/
//! [Log.sys.mjs]: https://searchfox.org/mozilla-central/source/toolkit/modules/Log.sys.mjs
//! [`error!`]: https://docs.rs/log/newest/log/macro.error.html
//! [`warn!`]: https://docs.rs/log/newest/log/macro.warn.html
//! [`info!`]: https://docs.rs/log/newest/log/macro.info.html
//! [`debug!`]: https://docs.rs/log/newest/log/macro.debug.html
//! [`trace!`]: https://docs.rs/log/newest/log/macro.trace.html
//! [`init`]: fn.init.html
//! [`init_with_level`]: fn.init_with_level.html
use std::fmt;
use std::io;
use std::io::Write;
use std::str;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use unicode_segmentation::UnicodeSegmentation;
use mozprofile::preferences::Pref;
static LOG_TRUNCATE: AtomicBool = AtomicBool::new(true);
static MAX_LOG_LEVEL: AtomicUsize = AtomicUsize::new(0);
const MAX_STRING_LENGTH: usize = 250;
const LOGGED_TARGETS: &[&str] = &[
"geckodriver",
"mozdevice",
"mozprofile",
"mozrunner",
"mozversion",
"webdriver",
];
/// Logger levels from [Log.sys.mjs].
///
/// [Log.sys.mjs]: https://searchfox.org/mozilla-central/source/toolkit/modules/Log.sys.mjs
#[repr(usize)]
#[derive(Clone, Copy, Eq, Debug, Hash, PartialEq)]
pub enum Level {
Fatal = 70,
Error = 60,
Warn = 50,
Info = 40,
Config = 30,
Debug = 20,
Trace = 10,
}
impl From<usize> for Level {
fn from(n: usize) -> Level {
use self::Level::*;
match n {
70 => Fatal,
60 => Error,
50 => Warn,
40 => Info,
30 => Config,
20 => Debug,
10 => Trace,
_ => Info,
}
}
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Level::*;
let s = match *self {
Fatal => "FATAL",
Error => "ERROR",
Warn => "WARN",
Info => "INFO",
Config => "CONFIG",
Debug => "DEBUG",
Trace => "TRACE",
};
write!(f, "{}", s)
}
}
impl str::FromStr for Level {
type Err = ();
fn from_str(s: &str) -> Result<Level, ()> {
use self::Level::*;
match s.to_lowercase().as_ref() {
"fatal" => Ok(Fatal),
"error" => Ok(Error),
"warn" => Ok(Warn),
"info" => Ok(Info),
"config" => Ok(Config),
"debug" => Ok(Debug),
"trace" => Ok(Trace),
_ => Err(()),
}
}
}
impl From<Level> for log::Level {
fn from(level: Level) -> log::Level {
use self::Level::*;
match level {
Fatal | Error => log::Level::Error,
Warn => log::Level::Warn,
Info => log::Level::Info,
Config | Debug => log::Level::Debug,
Trace => log::Level::Trace,
}
}
}
impl From<Level> for Pref {
fn from(level: Level) -> Pref {
use self::Level::*;
Pref::new(match level {
Fatal => "Fatal",
Error => "Error",
Warn => "Warn",
Info => "Info",
Config => "Config",
Debug => "Debug",
Trace => "Trace",
})
}
}
impl From<log::Level> for Level {
fn from(log_level: log::Level) -> Level {
use log::Level::*;
match log_level {
Error => Level::Error,
Warn => Level::Warn,
Info => Level::Info,
Debug => Level::Debug,
Trace => Level::Trace,
}
}
}
struct Logger;
impl log::Log for Logger {
fn enabled(&self, meta: &log::Metadata) -> bool {
LOGGED_TARGETS.iter().any(|&x| meta.target().starts_with(x))
&& meta.level() <= log::max_level()
}
fn log(&self, record: &log::Record) {
if self.enabled(record.metadata()) {
if let Some((s1, s2)) = truncate_message(record.args()) {
println!(
"{}\t{}\t{}\t{} ... {}",
format_ts(chrono::Local::now()),
record.target(),
record.level(),
s1,
s2
);
} else {
println!(
"{}\t{}\t{}\t{}",
format_ts(chrono::Local::now()),
record.target(),
record.level(),
record.args()
)
}
}
}
fn flush(&self) {
io::stdout().flush().unwrap();
}
}
/// Initialises the logging subsystem with the default log level.
pub fn init(truncate: bool) -> Result<(), log::SetLoggerError> {
init_with_level(Level::Info, truncate)
}
/// Initialises the logging subsystem.
pub fn init_with_level(level: Level, truncate: bool) -> Result<(), log::SetLoggerError> {
let logger = Logger {};
set_max_level(level);
set_truncate(truncate);
log::set_boxed_logger(Box::new(logger))?;
Ok(())
}
/// Returns the current maximum log level.
pub fn max_level() -> Level {
MAX_LOG_LEVEL.load(Ordering::Relaxed).into()
}
/// Sets the global maximum log level.
pub fn set_max_level(level: Level) {
MAX_LOG_LEVEL.store(level as usize, Ordering::SeqCst);
let slevel: log::Level = level.into();
log::set_max_level(slevel.to_level_filter())
}
/// Sets the global log truncation flag.
pub fn set_truncate(truncate: bool) {
LOG_TRUNCATE.store(truncate, Ordering::SeqCst);
}
/// Returns the truncation flag.
pub fn truncate() -> bool {
LOG_TRUNCATE.load(Ordering::Relaxed)
}
/// Produces a 13-digit Unix Epoch timestamp similar to Gecko.
fn format_ts(ts: chrono::DateTime<chrono::Local>) -> String {
format!("{}{:03}", ts.timestamp(), ts.timestamp_subsec_millis())
}
/// Truncate a log message if it's too long
fn truncate_message(args: &fmt::Arguments) -> Option<(String, String)> {
    // Don't truncate the message if truncation is disabled.
if !truncate() {
return None;
}
let message = format!("{}", args);
let chars = message.graphemes(true).collect::<Vec<&str>>();
if chars.len() > MAX_STRING_LENGTH {
let middle: usize = MAX_STRING_LENGTH / 2;
let s1 = chars[0..middle].concat();
let s2 = chars[chars.len() - middle..].concat();
Some((s1, s2))
} else {
None
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::str::FromStr;
use std::sync::Mutex;
use mozprofile::preferences::{Pref, PrefValue};
lazy_static! {
static ref LEVEL_MUTEX: Mutex<()> = Mutex::new(());
}
#[test]
fn test_level_repr() {
assert_eq!(Level::Fatal as usize, 70);
assert_eq!(Level::Error as usize, 60);
assert_eq!(Level::Warn as usize, 50);
assert_eq!(Level::Info as usize, 40);
assert_eq!(Level::Config as usize, 30);
assert_eq!(Level::Debug as usize, 20);
assert_eq!(Level::Trace as usize, 10);
}
#[test]
fn test_level_from_log() {
assert_eq!(Level::from(log::Level::Error), Level::Error);
assert_eq!(Level::from(log::Level::Warn), Level::Warn);
assert_eq!(Level::from(log::Level::Info), Level::Info);
assert_eq!(Level::from(log::Level::Debug), Level::Debug);
assert_eq!(Level::from(log::Level::Trace), Level::Trace);
}
#[test]
fn test_level_into_log() {
assert_eq!(Into::<log::Level>::into(Level::Fatal), log::Level::Error);
assert_eq!(Into::<log::Level>::into(Level::Error), log::Level::Error);
assert_eq!(Into::<log::Level>::into(Level::Warn), log::Level::Warn);
assert_eq!(Into::<log::Level>::into(Level::Info), log::Level::Info);
assert_eq!(Into::<log::Level>::into(Level::Config), log::Level::Debug);
assert_eq!(Into::<log::Level>::into(Level::Debug), log::Level::Debug);
assert_eq!(Into::<log::Level>::into(Level::Trace), log::Level::Trace);
}
#[test]
fn test_level_into_pref() {
let tests = [
(Level::Fatal, "Fatal"),
(Level::Error, "Error"),
(Level::Warn, "Warn"),
(Level::Info, "Info"),
(Level::Config, "Config"),
(Level::Debug, "Debug"),
(Level::Trace, "Trace"),
];
for &(lvl, s) in tests.iter() {
let expected = Pref {
value: PrefValue::String(s.to_string()),
sticky: false,
};
assert_eq!(Into::<Pref>::into(lvl), expected);
}
}
#[test]
fn test_level_from_str() {
assert_eq!(Level::from_str("fatal"), Ok(Level::Fatal));
assert_eq!(Level::from_str("error"), Ok(Level::Error));
assert_eq!(Level::from_str("warn"), Ok(Level::Warn));
assert_eq!(Level::from_str("info"), Ok(Level::Info));
assert_eq!(Level::from_str("config"), Ok(Level::Config));
assert_eq!(Level::from_str("debug"), Ok(Level::Debug));
assert_eq!(Level::from_str("trace"), Ok(Level::Trace));
assert_eq!(Level::from_str("INFO"), Ok(Level::Info));
assert!(Level::from_str("foo").is_err());
}
#[test]
fn test_level_to_str() {
assert_eq!(Level::Fatal.to_string(), "FATAL");
assert_eq!(Level::Error.to_string(), "ERROR");
assert_eq!(Level::Warn.to_string(), "WARN");
assert_eq!(Level::Info.to_string(), "INFO");
assert_eq!(Level::Config.to_string(), "CONFIG");
assert_eq!(Level::Debug.to_string(), "DEBUG");
assert_eq!(Level::Trace.to_string(), "TRACE");
}
#[test]
fn test_max_level() {
let _guard = LEVEL_MUTEX.lock();
set_max_level(Level::Info);
assert_eq!(max_level(), Level::Info);
}
#[test]
fn test_set_max_level() {
let _guard = LEVEL_MUTEX.lock();
set_max_level(Level::Error);
assert_eq!(max_level(), Level::Error);
set_max_level(Level::Fatal);
assert_eq!(max_level(), Level::Fatal);
}
#[test]
fn test_init_with_level() {
let _guard = LEVEL_MUTEX.lock();
init_with_level(Level::Debug, false).unwrap();
assert_eq!(max_level(), Level::Debug);
assert!(init_with_level(Level::Warn, false).is_err());
}
#[test]
fn test_format_ts() {
let ts = chrono::Local::now();
let s = format_ts(ts);
assert_eq!(s.len(), 13);
}
#[test]
fn test_truncate() {
let short_message = (0..MAX_STRING_LENGTH).map(|_| "x").collect::<String>();
// A message up to MAX_STRING_LENGTH is not truncated
assert_eq!(truncate_message(&format_args!("{}", short_message)), None);
let long_message = (0..MAX_STRING_LENGTH + 1).map(|_| "x").collect::<String>();
let part = (0..MAX_STRING_LENGTH / 2).map(|_| "x").collect::<String>();
        // A message longer than MAX_STRING_LENGTH is not truncated when truncation is disabled
set_truncate(false);
assert_eq!(truncate_message(&format_args!("{}", long_message)), None);
        // A message longer than MAX_STRING_LENGTH is truncated when truncation is enabled
set_truncate(true);
assert_eq!(
truncate_message(&format_args!("{}", long_message)),
Some((part.to_owned(), part))
);
}
}
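
For readers unfamiliar with the log facade this module plugs into, the following stand-alone sketch (hypothetical, not geckodriver code; it only assumes the log and chrono crates already used above) shows a logger that emits the same tab-separated timestamp/target/level/message layout:

use log::{info, LevelFilter, Log, Metadata, Record};

struct TabLogger;

impl Log for TabLogger {
    fn enabled(&self, _meta: &Metadata) -> bool {
        true
    }

    fn log(&self, record: &Record) {
        // 13-digit millisecond timestamp, then target, level and message,
        // separated by tab characters.
        let ts = chrono::Local::now();
        println!(
            "{}{:03}\t{}\t{}\t{}",
            ts.timestamp(),
            ts.timestamp_subsec_millis(),
            record.target(),
            record.level(),
            record.args()
        );
    }

    fn flush(&self) {}
}

fn main() {
    log::set_boxed_logger(Box::new(TabLogger)).unwrap();
    log::set_max_level(LevelFilter::Info);
    // Emits something like: 1693569600123<TAB>module_path<TAB>INFO<TAB>Listening on 127.0.0.1:4444
    info!("Listening on 127.0.0.1:4444");
}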

@ -0,0 +1,549 @@
#![forbid(unsafe_code)]
extern crate chrono;
#[macro_use]
extern crate clap;
#[macro_use]
extern crate lazy_static;
extern crate hyper;
extern crate marionette as marionette_rs;
extern crate mozdevice;
extern crate mozprofile;
extern crate mozrunner;
extern crate mozversion;
extern crate regex;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde_yaml;
extern crate tempfile;
extern crate url;
extern crate uuid;
extern crate webdriver;
extern crate zip;
#[macro_use]
extern crate log;
use std::env;
use std::fmt;
use std::io;
use std::net::{IpAddr, SocketAddr, ToSocketAddrs};
use std::path::PathBuf;
use std::result;
use std::str::FromStr;
use clap::{AppSettings, Arg, Command};
macro_rules! try_opt {
($expr:expr, $err_type:expr, $err_msg:expr) => {{
match $expr {
Some(x) => x,
None => return Err(WebDriverError::new($err_type, $err_msg)),
}
}};
}
mod android;
mod browser;
mod build;
mod capabilities;
mod command;
mod logging;
mod marionette;
mod prefs;
#[cfg(test)]
pub mod test;
use crate::command::extension_routes;
use crate::logging::Level;
use crate::marionette::{MarionetteHandler, MarionetteSettings};
use mozdevice::AndroidStorageInput;
use url::{Host, Url};
const EXIT_SUCCESS: i32 = 0;
const EXIT_USAGE: i32 = 64;
const EXIT_UNAVAILABLE: i32 = 69;
enum FatalError {
Parsing(clap::Error),
Usage(String),
Server(io::Error),
}
impl FatalError {
fn exit_code(&self) -> i32 {
use FatalError::*;
match *self {
Parsing(_) | Usage(_) => EXIT_USAGE,
Server(_) => EXIT_UNAVAILABLE,
}
}
fn help_included(&self) -> bool {
matches!(*self, FatalError::Parsing(_))
}
}
impl From<clap::Error> for FatalError {
fn from(err: clap::Error) -> FatalError {
FatalError::Parsing(err)
}
}
impl From<io::Error> for FatalError {
fn from(err: io::Error) -> FatalError {
FatalError::Server(err)
}
}
// harmonise error message from clap to avoid duplicate "error:" prefix
impl fmt::Display for FatalError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use FatalError::*;
let s = match *self {
Parsing(ref err) => err.to_string(),
Usage(ref s) => format!("error: {}", s),
Server(ref err) => format!("error: {}", err),
};
write!(f, "{}", s)
}
}
macro_rules! usage {
($msg:expr) => {
return Err(FatalError::Usage($msg.to_string()))
};
($fmt:expr, $($arg:tt)+) => {
return Err(FatalError::Usage(format!($fmt, $($arg)+)))
};
}
type ProgramResult<T> = result::Result<T, FatalError>;
#[allow(clippy::large_enum_variant)]
enum Operation {
Help,
Version,
Server {
log_level: Option<Level>,
log_truncate: bool,
address: SocketAddr,
allow_hosts: Vec<Host>,
allow_origins: Vec<Url>,
settings: MarionetteSettings,
deprecated_storage_arg: bool,
},
}
/// Get a socket address from the provided host and port
///
/// # Arguments
/// * `webdriver_host` - The hostname on which the server will listen
/// * `webdriver_port` - The port on which the server will listen
///
/// When the host and port resolve to multiple addresses, prefer
/// IPv4 addresses over IPv6 addresses.
fn server_address(webdriver_host: &str, webdriver_port: u16) -> ProgramResult<SocketAddr> {
let mut socket_addrs = match format!("{}:{}", webdriver_host, webdriver_port).to_socket_addrs()
{
Ok(addrs) => addrs.collect::<Vec<_>>(),
Err(e) => usage!("{}: {}:{}", e, webdriver_host, webdriver_port),
};
if socket_addrs.is_empty() {
usage!(
"Unable to resolve host: {}:{}",
webdriver_host,
webdriver_port
)
}
// Prefer ipv4 address
socket_addrs.sort_by(|a, b| {
let a_val = i32::from(!a.ip().is_ipv4());
let b_val = i32::from(!b.ip().is_ipv4());
a_val.partial_cmp(&b_val).expect("Comparison failed")
});
Ok(socket_addrs.remove(0))
}
/// Parse a given string into a Host
fn parse_hostname(webdriver_host: &str) -> Result<Host, url::ParseError> {
let host_str = if let Ok(ip_addr) = IpAddr::from_str(webdriver_host) {
// In this case we have an IP address as the host
if ip_addr.is_ipv6() {
// Convert to quoted form
format!("[{}]", &webdriver_host)
} else {
webdriver_host.into()
}
} else {
webdriver_host.into()
};
Host::parse(&host_str)
}
/// Get a list of default hostnames to allow
///
/// This only covers domain names, not IP addresses, since IP addresses
/// are always accepted.
fn get_default_allowed_hosts(ip: IpAddr) -> Vec<Result<Host, url::ParseError>> {
let localhost_is_loopback = ("localhost".to_string(), 80)
.to_socket_addrs()
.map(|addr_iter| {
addr_iter
.map(|addr| addr.ip())
.filter(|ip| ip.is_loopback())
})
.iter()
.len()
> 0;
if ip.is_loopback() && localhost_is_loopback {
vec![Host::parse("localhost")]
} else {
vec![]
}
}
fn get_allowed_hosts(
host: Host,
allow_hosts: Option<clap::Values>,
) -> Result<Vec<Host>, url::ParseError> {
allow_hosts
.map(|hosts| hosts.map(Host::parse).collect::<Vec<_>>())
.unwrap_or_else(|| match host {
Host::Domain(_) => {
vec![Ok(host.clone())]
}
Host::Ipv4(ip) => get_default_allowed_hosts(IpAddr::V4(ip)),
Host::Ipv6(ip) => get_default_allowed_hosts(IpAddr::V6(ip)),
})
.into_iter()
.collect::<Result<Vec<Host>, url::ParseError>>()
}
fn get_allowed_origins(allow_origins: Option<clap::Values>) -> Result<Vec<Url>, url::ParseError> {
allow_origins
.map(|origins| {
origins
.map(Url::parse)
.collect::<Result<Vec<Url>, url::ParseError>>()
})
.unwrap_or_else(|| Ok(vec![]))
}
fn parse_args(cmd: &mut Command) -> ProgramResult<Operation> {
let args = cmd.try_get_matches_from_mut(env::args())?;
if args.is_present("help") {
return Ok(Operation::Help);
} else if args.is_present("version") {
return Ok(Operation::Version);
}
let log_level = if args.is_present("log_level") {
Level::from_str(args.value_of("log_level").unwrap()).ok()
} else {
Some(match args.occurrences_of("verbosity") {
0 => Level::Info,
1 => Level::Debug,
_ => Level::Trace,
})
};
let webdriver_host = args.value_of("webdriver_host").unwrap();
let webdriver_port = {
let s = args.value_of("webdriver_port").unwrap();
match u16::from_str(s) {
Ok(n) => n,
Err(e) => usage!("invalid --port: {}: {}", e, s),
}
};
let android_storage = args
.value_of_t::<AndroidStorageInput>("android_storage")
.unwrap_or(AndroidStorageInput::Auto);
let binary = args.value_of("binary").map(PathBuf::from);
let profile_root = args.value_of("profile_root").map(PathBuf::from);
// Try to create a temporary directory on startup to check that the directory exists and is writable
{
let tmp_dir = if let Some(ref tmp_root) = profile_root {
tempfile::tempdir_in(tmp_root)
} else {
tempfile::tempdir()
};
if tmp_dir.is_err() {
usage!("Unable to write to temporary directory; consider --profile-root with a writeable directory")
}
}
let marionette_host = args.value_of("marionette_host").unwrap();
let marionette_port = match args.value_of("marionette_port") {
Some(s) => match u16::from_str(s) {
Ok(n) => Some(n),
Err(e) => usage!("invalid --marionette-port: {}", e),
},
None => None,
};
// For Android the port on the device must be the same as the one on the
// host. For now default to 9222, which is the default for --remote-debugging-port.
let websocket_port = match args.value_of("websocket_port") {
Some(s) => match u16::from_str(s) {
Ok(n) => n,
Err(e) => usage!("invalid --websocket-port: {}", e),
},
None => 9222,
};
let host = match parse_hostname(webdriver_host) {
Ok(name) => name,
Err(e) => usage!("invalid --host {}: {}", webdriver_host, e),
};
let allow_hosts = match get_allowed_hosts(host, args.values_of("allow_hosts")) {
Ok(hosts) => hosts,
Err(e) => usage!("invalid --allow-hosts {}", e),
};
let allow_origins = match get_allowed_origins(args.values_of("allow_origins")) {
Ok(origins) => origins,
Err(e) => usage!("invalid --allow-origins {}", e),
};
let address = server_address(webdriver_host, webdriver_port)?;
let settings = MarionetteSettings {
binary,
profile_root,
connect_existing: args.is_present("connect_existing"),
host: marionette_host.into(),
port: marionette_port,
websocket_port,
allow_hosts: allow_hosts.clone(),
allow_origins: allow_origins.clone(),
jsdebugger: args.is_present("jsdebugger"),
android_storage,
};
Ok(Operation::Server {
log_level,
log_truncate: !args.is_present("log_no_truncate"),
allow_hosts,
allow_origins,
address,
settings,
deprecated_storage_arg: args.is_present("android_storage"),
})
}
fn inner_main(cmd: &mut Command) -> ProgramResult<()> {
match parse_args(cmd)? {
Operation::Help => print_help(cmd),
Operation::Version => print_version(),
Operation::Server {
log_level,
log_truncate,
address,
allow_hosts,
allow_origins,
settings,
deprecated_storage_arg,
} => {
if let Some(ref level) = log_level {
logging::init_with_level(*level, log_truncate).unwrap();
} else {
logging::init(log_truncate).unwrap();
}
if deprecated_storage_arg {
warn!("--android-storage argument is deprecated and will be removed soon.");
};
let handler = MarionetteHandler::new(settings);
let listening = webdriver::server::start(
address,
allow_hosts,
allow_origins,
handler,
extension_routes(),
)?;
info!("Listening on {}", listening.socket);
}
}
Ok(())
}
fn main() {
use std::process::exit;
let mut cmd = make_command();
    // use std::process::Termination when it graduates
exit(match inner_main(&mut cmd) {
Ok(_) => EXIT_SUCCESS,
Err(e) => {
eprintln!("{}: {}", get_program_name(), e);
if !e.help_included() {
print_help(&mut cmd);
}
e.exit_code()
}
});
}
fn make_command<'a>() -> Command<'a> {
Command::new(format!("geckodriver {}", build::build_info()))
.setting(AppSettings::NoAutoHelp)
.setting(AppSettings::NoAutoVersion)
.about("WebDriver implementation for Firefox")
.arg(
Arg::new("webdriver_host")
.long("host")
.takes_value(true)
.value_name("HOST")
.default_value("127.0.0.1")
.help("Host IP to use for WebDriver server"),
)
.arg(
Arg::new("webdriver_port")
.short('p')
.long("port")
.takes_value(true)
.value_name("PORT")
.default_value("4444")
.help("Port to use for WebDriver server"),
)
.arg(
Arg::new("binary")
.short('b')
.long("binary")
.takes_value(true)
.value_name("BINARY")
.help("Path to the Firefox binary"),
)
.arg(
Arg::new("marionette_host")
.long("marionette-host")
.takes_value(true)
.value_name("HOST")
.default_value("127.0.0.1")
.help("Host to use to connect to Gecko"),
)
.arg(
Arg::new("marionette_port")
.long("marionette-port")
.takes_value(true)
.value_name("PORT")
.help("Port to use to connect to Gecko [default: system-allocated port]"),
)
.arg(
Arg::new("websocket_port")
.long("websocket-port")
.takes_value(true)
.value_name("PORT")
.conflicts_with("connect_existing")
.help("Port to use to connect to WebDriver BiDi [default: 9222]"),
)
.arg(
Arg::new("connect_existing")
.long("connect-existing")
.requires("marionette_port")
.help("Connect to an existing Firefox instance"),
)
.arg(
Arg::new("jsdebugger")
.long("jsdebugger")
.help("Attach browser toolbox debugger for Firefox"),
)
.arg(
Arg::new("verbosity")
.multiple_occurrences(true)
.conflicts_with("log_level")
.short('v')
.help("Log level verbosity (-v for debug and -vv for trace level)"),
)
.arg(
Arg::new("log_level")
.long("log")
.takes_value(true)
.value_name("LEVEL")
.possible_values(["fatal", "error", "warn", "info", "config", "debug", "trace"])
.help("Set Gecko log level"),
)
.arg(
Arg::new("log_no_truncate")
.long("log-no-truncate")
.help("Disable truncation of long log lines"),
)
.arg(
Arg::new("help")
.short('h')
.long("help")
.help("Prints this message"),
)
.arg(
Arg::new("version")
.short('V')
.long("version")
.help("Prints version and copying information"),
)
.arg(
Arg::new("profile_root")
.long("profile-root")
.takes_value(true)
.value_name("PROFILE_ROOT")
.help("Directory in which to create profiles. Defaults to the system temporary directory."),
)
.arg(
Arg::new("android_storage")
.long("android-storage")
.possible_values(["auto", "app", "internal", "sdcard"])
.value_name("ANDROID_STORAGE")
.help("Selects storage location to be used for test data (deprecated)."),
)
.arg(
Arg::new("allow_hosts")
.long("allow-hosts")
.takes_value(true)
.multiple_values(true)
.value_name("ALLOW_HOSTS")
.help("List of hostnames to allow. By default the value of --host is allowed, and in addition if that's a well known local address, other variations on well known local addresses are allowed. If --allow-hosts is provided only exactly those hosts are allowed."),
)
.arg(
Arg::new("allow_origins")
.long("allow-origins")
.takes_value(true)
.multiple_values(true)
.value_name("ALLOW_ORIGINS")
.help("List of request origins to allow. These must be formatted as scheme://host:port. By default any request with an origin header is rejected. If --allow-origins is provided then only exactly those origins are allowed."),
)
}
fn get_program_name() -> String {
env::args().next().unwrap()
}
fn print_help(cmd: &mut Command) {
cmd.print_help().ok();
println!();
}
fn print_version() {
println!("geckodriver {}", build::build_info());
println!();
println!("The source code of this program is available from");
println!("testing/geckodriver in https://hg.mozilla.org/mozilla-central.");
println!();
println!("This program is subject to the terms of the Mozilla Public License 2.0.");
println!("You can obtain a copy of the license at https://mozilla.org/MPL/2.0/.");
}

File diff suppressed because it is too large

@ -0,0 +1,158 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use mozprofile::preferences::Pref;
// ALL CHANGES TO THIS FILE MUST HAVE REVIEW FROM A GECKODRIVER PEER!
//
// Please refer to INSTRUCTIONS TO ADD A NEW PREFERENCE in
// remote/shared/RecommendedPreferences.sys.mjs
//
// Note: geckodriver is used out-of-tree with various builds of Firefox.
// Removing a preference from this file will cause regressions,
// so please be careful and get review from a Testing :: geckodriver peer
// before you make any changes to this file.
lazy_static! {
pub static ref DEFAULT: Vec<(&'static str, Pref)> = vec![
// Make sure Shield doesn't hit the network.
("app.normandy.api_url", Pref::new("")),
// Disable Firefox old build background check
("app.update.checkInstallTime", Pref::new(false)),
// Disable automatically upgrading Firefox
//
// Note: Possible update tests could reset or flip the value to allow
// updates to be downloaded and applied.
("app.update.disabledForTesting", Pref::new(true)),
// Enable the dump function, which sends messages to the system
// console
("browser.dom.window.dump.enabled", Pref::new(true)),
("devtools.console.stdout.chrome", Pref::new(true)),
// Disable safebrowsing components
("browser.safebrowsing.blockedURIs.enabled", Pref::new(false)),
("browser.safebrowsing.downloads.enabled", Pref::new(false)),
("browser.safebrowsing.passwords.enabled", Pref::new(false)),
("browser.safebrowsing.malware.enabled", Pref::new(false)),
("browser.safebrowsing.phishing.enabled", Pref::new(false)),
// Do not restore the last open set of tabs if the browser crashed
("browser.sessionstore.resume_from_crash", Pref::new(false)),
// Skip check for default browser on startup
("browser.shell.checkDefaultBrowser", Pref::new(false)),
// Do not redirect user when a milestone upgrade of Firefox
// is detected
("browser.startup.homepage_override.mstone", Pref::new("ignore")),
// Start with a blank page (about:blank)
("browser.startup.page", Pref::new(0)),
// Disable the UI tour
("browser.uitour.enabled", Pref::new(false)),
// Do not warn on quitting Firefox
("browser.warnOnQuit", Pref::new(false)),
// Defensively disable data reporting systems
("datareporting.healthreport.documentServerURI", Pref::new("http://%(server)s/dummy/healthreport/")),
("datareporting.healthreport.logging.consoleEnabled", Pref::new(false)),
("datareporting.healthreport.service.enabled", Pref::new(false)),
("datareporting.healthreport.service.firstRun", Pref::new(false)),
("datareporting.healthreport.uploadEnabled", Pref::new(false)),
// Do not show datareporting policy notifications which can
// interfere with tests
("datareporting.policy.dataSubmissionEnabled", Pref::new(false)),
("datareporting.policy.dataSubmissionPolicyBypassNotification", Pref::new(true)),
// Disable the ProcessHangMonitor
("dom.ipc.reportProcessHangs", Pref::new(false)),
// Only load extensions from the application and user profile
// AddonManager.SCOPE_PROFILE + AddonManager.SCOPE_APPLICATION
("extensions.autoDisableScopes", Pref::new(0)),
("extensions.enabledScopes", Pref::new(5)),
        // Disable installing any distribution extensions or add-ons
("extensions.installDistroAddons", Pref::new(false)),
// Turn off extension updates so they do not bother tests
("extensions.update.enabled", Pref::new(false)),
("extensions.update.notifyUser", Pref::new(false)),
        // Allow the application to have focus even if it runs in the
        // background
("focusmanager.testmode", Pref::new(true)),
// Disable useragent updates
("general.useragent.updates.enabled", Pref::new(false)),
// Always use network provider for geolocation tests so we bypass
// the macOS dialog raised by the corelocation provider
("geo.provider.testing", Pref::new(true)),
// Do not scan wi-fi
("geo.wifi.scan", Pref::new(false)),
// No hang monitor
("hangmonitor.timeout", Pref::new(0)),
// Disable idle-daily notifications to avoid expensive operations
// that may cause unexpected test timeouts.
("idle.lastDailyNotification", Pref::new(-1)),
        // Disable download and usage of the OpenH264 and Widevine plugins
("media.gmp-manager.updateEnabled", Pref::new(false)),
// Disable the GFX sanity window
("media.sanity-test.disabled", Pref::new(true)),
// Do not automatically switch between offline and online
("network.manage-offline-status", Pref::new(false)),
// Make sure SNTP requests do not hit the network
("network.sntp.pools", Pref::new("%(server)s")),
        // Disable Flash. The plugin container it runs in causes
        // problems when quitting Firefox from geckodriver,
        // cf. https://github.com/mozilla/geckodriver/issues/225.
("plugin.state.flash", Pref::new(0)),
// Don't do network connections for mitm priming
("security.certerrors.mitm.priming.enabled", Pref::new(false)),
// Ensure blocklist updates don't hit the network
("services.settings.server", Pref::new("")),
// Disable first run pages
("startup.homepage_welcome_url", Pref::new("about:blank")),
("startup.homepage_welcome_url.additional", Pref::new("")),
// asrouter expects a plain object or null
("browser.newtabpage.activity-stream.asrouter.providers.cfr", Pref::new("null")),
// TODO: Remove once minimum supported Firefox release is 93.
("browser.newtabpage.activity-stream.asrouter.providers.cfr-fxa", Pref::new("null")),
("browser.newtabpage.activity-stream.asrouter.providers.snippets", Pref::new("null")),
("browser.newtabpage.activity-stream.asrouter.providers.message-groups", Pref::new("null")),
("browser.newtabpage.activity-stream.asrouter.providers.whats-new-panel", Pref::new("null")),
("browser.newtabpage.activity-stream.asrouter.providers.messaging-experiments", Pref::new("null")),
("browser.newtabpage.activity-stream.feeds.system.topstories", Pref::new(false)),
("browser.newtabpage.activity-stream.feeds.snippets", Pref::new(false)),
("browser.newtabpage.activity-stream.tippyTop.service.endpoint", Pref::new("")),
("browser.newtabpage.activity-stream.discoverystream.config", Pref::new("[]")),
// For Activity Stream firstrun page, use an empty string to avoid fetching.
("browser.newtabpage.activity-stream.fxaccounts.endpoint", Pref::new("")),
// Prevent starting into safe mode after application crashes
("toolkit.startup.max_resumed_crashes", Pref::new(-1)),
// Disable webapp updates.
("browser.webapps.checkForUpdates", Pref::new(0)),
];
}
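
The DEFAULT table is a plain Vec of (name, Pref) pairs, so callers can inspect or override an entry before the profile is written. A minimal sketch, assuming the mozprofile::preferences::Pref type used above (and assuming it derives Debug, as the logging tests earlier suggest):

use mozprofile::preferences::Pref;

fn main() {
    // Two entries in the same shape as the DEFAULT table above.
    let defaults: Vec<(&'static str, Pref)> = vec![
        ("browser.shell.checkDefaultBrowser", Pref::new(false)),
        ("browser.startup.page", Pref::new(0)),
    ];

    // Look a preference up by name, e.g. before deciding to override it.
    if let Some((name, pref)) = defaults
        .iter()
        .find(|(name, _)| *name == "browser.startup.page")
    {
        println!("{} -> {:?}", name, pref);
    }
}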

@ -0,0 +1,12 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
pub fn assert_de<T>(data: &T, json: serde_json::Value)
where
T: std::fmt::Debug,
T: std::cmp::PartialEq,
T: serde::de::DeserializeOwned,
{
assert_eq!(data, &serde_json::from_value::<T>(json).unwrap());
}

@ -0,0 +1,738 @@
@Switch01
A_Rog
Aakanksha Agrawal
Abhinav Sagar
ABHYUDAY PRATAP SINGH
abs51295
AceGentile
Adam Chainz
Adam Tse
Adam Wentz
admin
Adrien Morison
ahayrapetyan
Ahilya
AinsworthK
Akash Srivastava
Alan Yee
Albert Tugushev
Albert-Guan
albertg
Alberto Sottile
Aleks Bunin
Alethea Flowers
Alex Gaynor
Alex Grönholm
Alex Hedges
Alex Loosley
Alex Morega
Alex Stachowiak
Alexander Shtyrov
Alexandre Conrad
Alexey Popravka
Alli
Ami Fischman
Ananya Maiti
Anatoly Techtonik
Anders Kaseorg
Andre Aguiar
Andreas Lutro
Andrei Geacar
Andrew Gaul
Andrew Shymanel
Andrey Bienkowski
Andrey Bulgakov
Andrés Delfino
Andy Freeland
Andy Kluger
Ani Hayrapetyan
Aniruddha Basak
Anish Tambe
Anrs Hu
Anthony Sottile
Antoine Musso
Anton Ovchinnikov
Anton Patrushev
Antonio Alvarado Hernandez
Antony Lee
Antti Kaihola
Anubhav Patel
Anudit Nagar
Anuj Godase
AQNOUCH Mohammed
AraHaan
Arindam Choudhury
Armin Ronacher
Artem
Arun Babu Neelicattu
Ashley Manton
Ashwin Ramaswami
atse
Atsushi Odagiri
Avinash Karhana
Avner Cohen
Awit (Ah-Wit) Ghirmai
Baptiste Mispelon
Barney Gale
barneygale
Bartek Ogryczak
Bastian Venthur
Ben Bodenmiller
Ben Darnell
Ben Hoyt
Ben Mares
Ben Rosser
Bence Nagy
Benjamin Peterson
Benjamin VanEvery
Benoit Pierre
Berker Peksag
Bernard
Bernard Tyers
Bernardo B. Marques
Bernhard M. Wiedemann
Bertil Hatt
Bhavam Vidyarthi
Blazej Michalik
Bogdan Opanchuk
BorisZZZ
Brad Erickson
Bradley Ayers
Brandon L. Reiss
Brandt Bucher
Brett Randall
Brett Rosen
Brian Cristante
Brian Rosner
briantracy
BrownTruck
Bruno Oliveira
Bruno Renié
Bruno S
Bstrdsmkr
Buck Golemon
burrows
Bussonnier Matthias
bwoodsend
c22
Caleb Martinez
Calvin Smith
Carl Meyer
Carlos Liam
Carol Willing
Carter Thayer
Cass
Chandrasekhar Atina
Chih-Hsuan Yen
Chris Brinker
Chris Hunt
Chris Jerdonek
Chris Kuehl
Chris McDonough
Chris Pawley
Chris Pryer
Chris Wolfe
Christian Clauss
Christian Heimes
Christian Oudard
Christoph Reiter
Christopher Hunt
Christopher Snyder
cjc7373
Clark Boylan
Claudio Jolowicz
Clay McClure
Cody
Cody Soyland
Colin Watson
Collin Anderson
Connor Osborn
Cooper Lees
Cooper Ry Lees
Cory Benfield
Cory Wright
Craig Kerstiens
Cristian Sorinel
Cristina
Cristina Muñoz
Curtis Doty
cytolentino
Daan De Meyer
Damian
Damian Quiroga
Damian Shaw
Dan Black
Dan Savilonis
Dan Sully
Dane Hillard
daniel
Daniel Collins
Daniel Hahler
Daniel Holth
Daniel Jost
Daniel Katz
Daniel Shaulov
Daniele Esposti
Daniele Nicolodi
Daniele Procida
Daniil Konovalenko
Danny Hermes
Danny McClanahan
Darren Kavanagh
Dav Clark
Dave Abrahams
Dave Jones
David Aguilar
David Black
David Bordeynik
David Caro
David D Lowe
David Evans
David Hewitt
David Linke
David Poggi
David Pursehouse
David Runge
David Tucker
David Wales
Davidovich
Deepak Sharma
Deepyaman Datta
Denise Yu
derwolfe
Desetude
Devesh Kumar Singh
Diego Caraballo
Diego Ramirez
DiegoCaraballo
Dimitri Merejkowsky
Dimitri Papadopoulos
Dirk Stolle
Dmitry Gladkov
Dmitry Volodin
Domen Kožar
Dominic Davis-Foster
Donald Stufft
Dongweiming
doron zarhi
Dos Moonen
Douglas Thor
DrFeathers
Dustin Ingram
Dwayne Bailey
Ed Morley
Edgar Ramírez
Ee Durbin
Eitan Adler
ekristina
elainechan
Eli Schwartz
Elisha Hollander
Ellen Marie Dash
Emil Burzo
Emil Styrke
Emmanuel Arias
Endoh Takanao
enoch
Erdinc Mutlu
Eric Cousineau
Eric Gillingham
Eric Hanchrow
Eric Hopper
Erik M. Bray
Erik Rose
Erwin Janssen
Eugene Vereshchagin
everdimension
Federico
Felipe Peter
Felix Yan
fiber-space
Filip Kokosiński
Filipe Laíns
Finn Womack
finnagin
Florian Briand
Florian Rathgeber
Francesco
Francesco Montesano
Frost Ming
Gabriel Curio
Gabriel de Perthuis
Garry Polley
gavin
gdanielson
Geoffrey Sneddon
George Song
Georgi Valkov
Georgy Pchelkin
ghost
Giftlin Rajaiah
gizmoguy1
gkdoc
Godefroid Chapelle
Gopinath M
GOTO Hayato
gousaiyang
gpiks
Greg Roodt
Greg Ward
Guilherme Espada
Guillaume Seguin
gutsytechster
Guy Rozendorn
Guy Tuval
gzpan123
Hanjun Kim
Hari Charan
Harsh Vardhan
harupy
Harutaka Kawamura
hauntsaninja
Henrich Hartzer
Henry Schreiner
Herbert Pfennig
Holly Stotelmyer
Honnix
Hsiaoming Yang
Hugo Lopes Tavares
Hugo van Kemenade
Hugues Bruant
Hynek Schlawack
Ian Bicking
Ian Cordasco
Ian Lee
Ian Stapleton Cordasco
Ian Wienand
Igor Kuzmitshov
Igor Sobreira
Ilan Schnell
Illia Volochii
Ilya Baryshev
Inada Naoki
Ionel Cristian Mărieș
Ionel Maries Cristian
Ivan Pozdeev
Jacob Kim
Jacob Walls
Jaime Sanz
jakirkham
Jakub Kuczys
Jakub Stasiak
Jakub Vysoky
Jakub Wilk
James Cleveland
James Curtin
James Firth
James Gerity
James Polley
Jan Pokorný
Jannis Leidel
Jarek Potiuk
jarondl
Jason Curtis
Jason R. Coombs
JasonMo
JasonMo1
Jay Graves
Jean-Christophe Fillion-Robin
Jeff Barber
Jeff Dairiki
Jelmer Vernooij
jenix21
Jeremy Stanley
Jeremy Zafran
Jesse Rittner
Jiashuo Li
Jim Fisher
Jim Garrison
Jiun Bae
Jivan Amara
Joe Bylund
Joe Michelini
John Paton
John T. Wodder II
John-Scott Atlakson
johnthagen
Jon Banafato
Jon Dufresne
Jon Parise
Jonas Nockert
Jonathan Herbert
Joonatan Partanen
Joost Molenaar
Jorge Niedbalski
Joseph Bylund
Joseph Long
Josh Bronson
Josh Hansen
Josh Schneier
Juan Luis Cano Rodríguez
Juanjo Bazán
Judah Rand
Julian Berman
Julian Gethmann
Julien Demoor
Jussi Kukkonen
jwg4
Jyrki Pulliainen
Kai Chen
Kai Mueller
Kamal Bin Mustafa
kasium
kaustav haldar
keanemind
Keith Maxwell
Kelsey Hightower
Kenneth Belitzky
Kenneth Reitz
Kevin Burke
Kevin Carter
Kevin Frommelt
Kevin R Patterson
Kexuan Sun
Kit Randel
Klaas van Schelven
KOLANICH
kpinc
Krishna Oza
Kumar McMillan
Kyle Persohn
lakshmanaram
Laszlo Kiss-Kollar
Laurent Bristiel
Laurent LAPORTE
Laurie O
Laurie Opperman
layday
Leon Sasson
Lev Givon
Lincoln de Sousa
Lipis
lorddavidiii
Loren Carvalho
Lucas Cimon
Ludovic Gasc
Lukas Juhrich
Luke Macken
Luo Jiebin
luojiebin
luz.paz
László Kiss Kollár
M00nL1ght
Marc Abramowitz
Marc Tamlyn
Marcus Smith
Mariatta
Mark Kohler
Mark Williams
Markus Hametner
Martey Dodoo
Martin Fischer
Martin Häcker
Martin Pavlasek
Masaki
Masklinn
Matej Stuchlik
Mathew Jennings
Mathieu Bridon
Mathieu Kniewallner
Matt Bacchi
Matt Good
Matt Maker
Matt Robenolt
matthew
Matthew Einhorn
Matthew Feickert
Matthew Gilliard
Matthew Iversen
Matthew Treinish
Matthew Trumbell
Matthew Willson
Matthias Bussonnier
mattip
Maurits van Rees
Max W Chase
Maxim Kurnikov
Maxime Rouyrre
mayeut
mbaluna
mdebi
memoselyk
meowmeowcat
Michael
Michael Aquilina
Michael E. Karpeles
Michael Klich
Michael Mintz
Michael Williamson
michaelpacer
Michał Górny
Mickaël Schoentgen
Miguel Araujo Perez
Mihir Singh
Mike
Mike Hendricks
Min RK
MinRK
Miro Hrončok
Monica Baluna
montefra
Monty Taylor
Muha Ajjan
Nadav Wexler
Nahuel Ambrosini
Nate Coraor
Nate Prewitt
Nathan Houghton
Nathaniel J. Smith
Nehal J Wani
Neil Botelho
Nguyễn Gia Phong
Nicholas Serra
Nick Coghlan
Nick Stenning
Nick Timkovich
Nicolas Bock
Nicole Harris
Nikhil Benesch
Nikhil Ladha
Nikita Chepanov
Nikolay Korolev
Nipunn Koorapati
Nitesh Sharma
Niyas Sait
Noah
Noah Gorny
Nowell Strite
NtaleGrey
nvdv
OBITORASU
Ofek Lev
ofrinevo
Oliver Freund
Oliver Jeeves
Oliver Mannion
Oliver Tonnhofer
Olivier Girardot
Olivier Grisel
Ollie Rutherfurd
OMOTO Kenji
Omry Yadan
onlinejudge95
Oren Held
Oscar Benjamin
Oz N Tiram
Pachwenko
Patrick Dubroy
Patrick Jenkins
Patrick Lawson
patricktokeeffe
Patrik Kopkan
Paul Kehrer
Paul Moore
Paul Nasrat
Paul Oswald
Paul van der Linden
Paulus Schoutsen
Pavel Safronov
Pavithra Eswaramoorthy
Pawel Jasinski
Paweł Szramowski
Pekka Klärck
Peter Gessler
Peter Lisák
Peter Waller
petr-tik
Phaneendra Chiruvella
Phil Elson
Phil Freo
Phil Pennock
Phil Whelan
Philip Jägenstedt
Philip Molloy
Philippe Ombredanne
Pi Delport
Pierre-Yves Rofes
Pieter Degroote
pip
Prabakaran Kumaresshan
Prabhjyotsing Surjit Singh Sodhi
Prabhu Marappan
Pradyun Gedam
Prashant Sharma
Pratik Mallya
pre-commit-ci[bot]
Preet Thakkar
Preston Holmes
Przemek Wrzos
Pulkit Goyal
q0w
Qiangning Hong
Quentin Lee
Quentin Pradet
R. David Murray
Rafael Caricio
Ralf Schmitt
Razzi Abuissa
rdb
Reece Dunham
Remi Rampin
Rene Dudfield
Riccardo Magliocchetti
Riccardo Schirone
Richard Jones
Richard Si
Ricky Ng-Adam
Rishi
RobberPhex
Robert Collins
Robert McGibbon
Robert Pollak
Robert T. McGibbon
robin elisha robinson
Roey Berman
Rohan Jain
Roman Bogorodskiy
Roman Donchenko
Romuald Brunet
ronaudinho
Ronny Pfannschmidt
Rory McCann
Ross Brattain
Roy Wellington Ⅳ
Ruairidh MacLeod
Russell Keith-Magee
Ryan Shepherd
Ryan Wooden
ryneeverett
Sachi King
Salvatore Rinchiera
sandeepkiran-js
Savio Jomton
schlamar
Scott Kitterman
Sean
seanj
Sebastian Jordan
Sebastian Schaetz
Segev Finer
SeongSoo Cho
Sergey Vasilyev
Seth Michael Larson
Seth Woodworth
Shantanu
shireenrao
Shivansh-007
Shlomi Fish
Shovan Maity
Simeon Visser
Simon Cross
Simon Pichugin
sinoroc
sinscary
snook92
socketubs
Sorin Sbarnea
Srinivas Nyayapati
Stavros Korokithakis
Stefan Scherfke
Stefano Rivera
Stephan Erb
Stephen Rosen
stepshal
Steve (Gadget) Barnes
Steve Barnes
Steve Dower
Steve Kowalik
Steven Myint
Steven Silvester
stonebig
Stéphane Bidoul
Stéphane Bidoul (ACSONE)
Stéphane Klein
Sumana Harihareswara
Surbhi Sharma
Sviatoslav Sydorenko
Swat009
Sylvain
Takayuki SHIMIZUKAWA
Taneli Hukkinen
tbeswick
Thiago
Thijs Triemstra
Thomas Fenzl
Thomas Grainger
Thomas Guettler
Thomas Johansson
Thomas Kluyver
Thomas Smith
Thomas VINCENT
Tim D. Smith
Tim Gates
Tim Harder
Tim Heap
tim smith
tinruufu
Tobias Hermann
Tom Forbes
Tom Freudenheim
Tom V
Tomas Hrnciar
Tomas Orsava
Tomer Chachamu
Tommi Enenkel | AnB
Tomáš Hrnčiar
Tony Beswick
Tony Narlock
Tony Zhaocheng Tan
TonyBeswick
toonarmycaptain
Toshio Kuratomi
toxinu
Travis Swicegood
Tushar Sadhwani
Tzu-ping Chung
Valentin Haenel
Victor Stinner
victorvpaulo
Vikram - Google
Viktor Szépe
Ville Skyttä
Vinay Sajip
Vincent Philippon
Vinicyus Macedo
Vipul Kumar
Vitaly Babiy
Vladimir Rutsky
W. Trevor King
Wil Tan
Wilfred Hughes
William Edwards
William ML Leslie
William T Olson
William Woodruff
Wilson Mo
wim glenn
Winson Luk
Wolfgang Maier
Wu Zhenyu
XAMES3
Xavier Fernandez
xoviat
xtreak
YAMAMOTO Takashi
Yen Chi Hsuan
Yeray Diaz Diaz
Yoval P
Yu Jian
Yuan Jing Vincent Yan
Yusuke Hayashi
Zearin
Zhiping Deng
ziebam
Zvezdan Petkovic
Łukasz Langa
Роман Донченко
Семён Марьясин
rekcäH nitraM

@ -0,0 +1,20 @@
Copyright (c) 2008-present The pip developers (see AUTHORS.txt file)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@ -0,0 +1,90 @@
Metadata-Version: 2.1
Name: pip
Version: 23.2.1
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: distutils-sig@python.org
License: MIT
Project-URL: Documentation, https://pip.pypa.io
Project-URL: Source, https://github.com/pypa/pip
Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.7
License-File: LICENSE.txt
License-File: AUTHORS.txt
pip - The Python Package Installer
==================================
.. image:: https://img.shields.io/pypi/v/pip.svg
:target: https://pypi.org/project/pip/
.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
:target: https://pip.pypa.io/en/latest
pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
Please take a look at our documentation for how to install and use pip:
* `Installation`_
* `Usage`_
We release updates regularly, with a new version every 3 months. Find more details in our documentation:
* `Release notes`_
* `Release process`_
In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
* `Issue tracking`_
* `Discourse channel`_
* `User IRC`_
If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
* `GitHub page`_
* `Development documentation`_
* `Development IRC`_
Code of Conduct
---------------
Everyone interacting in the pip project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _package installer: https://packaging.python.org/guides/tool-recommendations/
.. _Python Package Index: https://pypi.org
.. _Installation: https://pip.pypa.io/en/stable/installation/
.. _Usage: https://pip.pypa.io/en/stable/
.. _Release notes: https://pip.pypa.io/en/stable/news.html
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Development documentation: https://pip.pypa.io/en/latest/development
.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md

File diff suppressed because it is too large

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.40.0)
Root-Is-Purelib: true
Tag: py3-none-any

@ -0,0 +1,4 @@
[console_scripts]
pip = pip._internal.cli.main:main
pip3 = pip._internal.cli.main:main
pip3.11 = pip._internal.cli.main:main

@ -0,0 +1,13 @@
from typing import List, Optional
__version__ = "23.2.1"
def main(args: Optional[List[str]] = None) -> int:
"""This is an internal API only meant for use by pip's own console scripts.
For additional details, see https://github.com/pypa/pip/issues/7498.
"""
from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args)

@ -0,0 +1,24 @@
import os
import sys
# Remove '' and current working directory from the first entry
# of sys.path, if present to avoid using current directory
# in pip commands check, freeze, install, list and show,
# when invoked as python -m pip <command>
if sys.path[0] in ("", os.getcwd()):
sys.path.pop(0)
# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == "":
# __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
# Resulting path is the name of the wheel itself
# Add that to sys.path so we can import pip
path = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, path)
if __name__ == "__main__":
from pip._internal.cli.main import main as _main
sys.exit(_main())

@ -0,0 +1,50 @@
"""Execute exactly this copy of pip, within a different environment.
This file is named as it is, to ensure that this module can't be imported via
an import statement.
"""
# /!\ This version compatibility check section must be Python 2 compatible. /!\
import sys
# Copied from setup.py
PYTHON_REQUIRES = (3, 7)
def version_str(version): # type: ignore
return ".".join(str(v) for v in version)
if sys.version_info[:2] < PYTHON_REQUIRES:
raise SystemExit(
"This version of pip does not support python {} (requires >={}).".format(
version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
)
)
# From here on, we can use Python 3 features, but the syntax must remain
# Python 2 compatible.
import runpy # noqa: E402
from importlib.machinery import PathFinder # noqa: E402
from os.path import dirname # noqa: E402
PIP_SOURCES_ROOT = dirname(dirname(__file__))
class PipImportRedirectingFinder:
@classmethod
def find_spec(self, fullname, path=None, target=None): # type: ignore
if fullname != "pip":
return None
spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
assert spec, (PIP_SOURCES_ROOT, fullname)
return spec
sys.meta_path.insert(0, PipImportRedirectingFinder())
assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
runpy.run_module("pip", run_name="__main__", alter_sys=True)

@ -0,0 +1,19 @@
from typing import List, Optional
import pip._internal.utils.inject_securetransport # noqa
from pip._internal.utils import _log
# init_logging() must be called before any call to logging.getLogger()
# which happens at import of most modules.
_log.init_logging()
def main(args: (Optional[List[str]]) = None) -> int:
"""This is preserved for old console scripts that may still be referencing
it.
For additional details, see https://github.com/pypa/pip/issues/7498.
"""
from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args)

@ -0,0 +1,311 @@
"""Build Environment used for isolation during sdist building
"""
import logging
import os
import pathlib
import site
import sys
import textwrap
from collections import OrderedDict
from types import TracebackType
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
from pip._vendor.certifi import where
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.version import Version
from pip import __file__ as pip_location
from pip._internal.cli.spinners import open_spinner
from pip._internal.locations import get_platlib, get_purelib, get_scheme
from pip._internal.metadata import get_default_environment, get_environment
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
if TYPE_CHECKING:
from pip._internal.index.package_finder import PackageFinder
logger = logging.getLogger(__name__)
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
return (a, b) if a != b else (a,)
class _Prefix:
def __init__(self, path: str) -> None:
self.path = path
self.setup = False
scheme = get_scheme("", prefix=path)
self.bin_dir = scheme.scripts
self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
def get_runnable_pip() -> str:
"""Get a file to pass to a Python executable, to run the currently-running pip.
This is used to run a pip subprocess, for installing requirements into the build
environment.
"""
source = pathlib.Path(pip_location).resolve().parent
if not source.is_dir():
# This would happen if someone is using pip from inside a zip file. In that
# case, we can use that directly.
return str(source)
return os.fsdecode(source / "__pip-runner__.py")
def _get_system_sitepackages() -> Set[str]:
"""Get system site packages
Usually from site.getsitepackages,
but falling back on `get_purelib()/get_platlib()` if unavailable
(e.g. in a virtualenv created by virtualenv<20)
Returns normalized set of strings.
"""
if hasattr(site, "getsitepackages"):
system_sites = site.getsitepackages()
else:
# virtualenv < 20 overwrites site.py without getsitepackages
# fallback on get_purelib/get_platlib.
# this is known to miss things, but shouldn't in the cases
# where getsitepackages() has been removed (inside a virtualenv)
system_sites = [get_purelib(), get_platlib()]
return {os.path.normcase(path) for path in system_sites}
class BuildEnvironment:
"""Creates and manages an isolated environment to install build deps"""
def __init__(self) -> None:
temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
self._prefixes = OrderedDict(
(name, _Prefix(os.path.join(temp_dir.path, name)))
for name in ("normal", "overlay")
)
self._bin_dirs: List[str] = []
self._lib_dirs: List[str] = []
for prefix in reversed(list(self._prefixes.values())):
self._bin_dirs.append(prefix.bin_dir)
self._lib_dirs.extend(prefix.lib_dirs)
# Customize site to:
# - ensure .pth files are honored
# - prevent access to system site packages
system_sites = _get_system_sitepackages()
self._site_dir = os.path.join(temp_dir.path, "site")
if not os.path.exists(self._site_dir):
os.mkdir(self._site_dir)
with open(
os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
) as fp:
fp.write(
textwrap.dedent(
"""
import os, site, sys
# First, drop system-sites related paths.
original_sys_path = sys.path[:]
known_paths = set()
for path in {system_sites!r}:
site.addsitedir(path, known_paths=known_paths)
system_paths = set(
os.path.normcase(path)
for path in sys.path[len(original_sys_path):]
)
original_sys_path = [
path for path in original_sys_path
if os.path.normcase(path) not in system_paths
]
sys.path = original_sys_path
# Second, add lib directories.
# ensuring .pth files are processed.
for path in {lib_dirs!r}:
assert path not in sys.path
site.addsitedir(path)
"""
).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
)
def __enter__(self) -> None:
self._save_env = {
name: os.environ.get(name, None)
for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
}
path = self._bin_dirs[:]
old_path = self._save_env["PATH"]
if old_path:
path.extend(old_path.split(os.pathsep))
pythonpath = [self._site_dir]
os.environ.update(
{
"PATH": os.pathsep.join(path),
"PYTHONNOUSERSITE": "1",
"PYTHONPATH": os.pathsep.join(pythonpath),
}
)
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
for varname, old_value in self._save_env.items():
if old_value is None:
os.environ.pop(varname, None)
else:
os.environ[varname] = old_value
def check_requirements(
self, reqs: Iterable[str]
) -> Tuple[Set[Tuple[str, str]], Set[str]]:
"""Return 2 sets:
- conflicting requirements: set of (installed, wanted) reqs tuples
- missing requirements: set of reqs
"""
missing = set()
conflicting = set()
if reqs:
env = (
get_environment(self._lib_dirs)
if hasattr(self, "_lib_dirs")
else get_default_environment()
)
for req_str in reqs:
req = Requirement(req_str)
# We're explicitly evaluating with an empty extra value, since build
# environments are not provided any mechanism to select specific extras.
if req.marker is not None and not req.marker.evaluate({"extra": ""}):
continue
dist = env.get_distribution(req.name)
if not dist:
missing.add(req_str)
continue
if isinstance(dist.version, Version):
installed_req_str = f"{req.name}=={dist.version}"
else:
installed_req_str = f"{req.name}==={dist.version}"
if not req.specifier.contains(dist.version, prereleases=True):
conflicting.add((installed_req_str, req_str))
# FIXME: Consider direct URL?
return conflicting, missing
def install_requirements(
self,
finder: "PackageFinder",
requirements: Iterable[str],
prefix_as_string: str,
*,
kind: str,
) -> None:
prefix = self._prefixes[prefix_as_string]
assert not prefix.setup
prefix.setup = True
if not requirements:
return
self._install_requirements(
get_runnable_pip(),
finder,
requirements,
prefix,
kind=kind,
)
@staticmethod
def _install_requirements(
pip_runnable: str,
finder: "PackageFinder",
requirements: Iterable[str],
prefix: _Prefix,
*,
kind: str,
) -> None:
args: List[str] = [
sys.executable,
pip_runnable,
"install",
"--ignore-installed",
"--no-user",
"--prefix",
prefix.path,
"--no-warn-script-location",
]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append("-v")
for format_control in ("no_binary", "only_binary"):
formats = getattr(finder.format_control, format_control)
args.extend(
(
"--" + format_control.replace("_", "-"),
",".join(sorted(formats or {":none:"})),
)
)
index_urls = finder.index_urls
if index_urls:
args.extend(["-i", index_urls[0]])
for extra_index in index_urls[1:]:
args.extend(["--extra-index-url", extra_index])
else:
args.append("--no-index")
for link in finder.find_links:
args.extend(["--find-links", link])
for host in finder.trusted_hosts:
args.extend(["--trusted-host", host])
if finder.allow_all_prereleases:
args.append("--pre")
if finder.prefer_binary:
args.append("--prefer-binary")
args.append("--")
args.extend(requirements)
extra_environ = {"_PIP_STANDALONE_CERT": where()}
with open_spinner(f"Installing {kind}") as spinner:
call_subprocess(
args,
command_desc=f"pip subprocess to install {kind}",
spinner=spinner,
extra_environ=extra_environ,
)
class NoOpBuildEnvironment(BuildEnvironment):
"""A no-op drop-in replacement for BuildEnvironment"""
def __init__(self) -> None:
pass
def __enter__(self) -> None:
pass
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
pass
def cleanup(self) -> None:
pass
def install_requirements(
self,
finder: "PackageFinder",
requirements: Iterable[str],
prefix_as_string: str,
*,
kind: str,
) -> None:
raise NotImplementedError()
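# --- Illustrative sketch, not part of the vendored pip file above -------------
# The environment save/restore dance in BuildEnvironment.__enter__/__exit__,
# reduced to a standalone context manager. Names below are illustrative only.
import os
from contextlib import contextmanager
from typing import Dict, Iterator, Optional

@contextmanager
def scoped_environ(overrides: Dict[str, str]) -> Iterator[None]:
    saved: Dict[str, Optional[str]] = {name: os.environ.get(name) for name in overrides}
    os.environ.update(overrides)
    try:
        yield
    finally:
        for name, old in saved.items():
            if old is None:
                os.environ.pop(name, None)   # the variable did not exist before
            else:
                os.environ[name] = old       # put the previous value back

# Example: prepend a bin dir to PATH only for the duration of the block.
# with scoped_environ({"PATH": os.pathsep.join(["/tmp/env/bin", os.environ.get("PATH", "")])}):
#     ...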

@ -0,0 +1,292 @@
"""Cache Management
"""
import hashlib
import json
import logging
import os
from pathlib import Path
from typing import Any, Dict, List, Optional
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.direct_url import DirectUrl
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.urls import path_to_url
logger = logging.getLogger(__name__)
ORIGIN_JSON_NAME = "origin.json"
def _hash_dict(d: Dict[str, str]) -> str:
"""Return a stable sha224 of a dictionary."""
s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
return hashlib.sha224(s.encode("ascii")).hexdigest()
class Cache:
"""An abstract class - provides cache directories for data from links
:param cache_dir: The root of the cache.
"""
def __init__(self, cache_dir: str) -> None:
super().__init__()
assert not cache_dir or os.path.isabs(cache_dir)
self.cache_dir = cache_dir or None
def _get_cache_path_parts(self, link: Link) -> List[str]:
"""Get parts of part that must be os.path.joined with cache_dir"""
# We want to generate an url to use as our cache key, we don't want to
# just re-use the URL because it might have other items in the fragment
# and we don't care about those.
key_parts = {"url": link.url_without_fragment}
if link.hash_name is not None and link.hash is not None:
key_parts[link.hash_name] = link.hash
if link.subdirectory_fragment:
key_parts["subdirectory"] = link.subdirectory_fragment
# Include interpreter name, major and minor version in cache key
# to cope with ill-behaved sdists that build a different wheel
# depending on the python version their setup.py is being run on,
# and don't encode the difference in compatibility tags.
# https://github.com/pypa/pip/issues/7296
key_parts["interpreter_name"] = interpreter_name()
key_parts["interpreter_version"] = interpreter_version()
# Encode our key URL with sha224; we use it because it has similar
# security properties to sha256, but with a shorter total output (and
# is thus less secure). However, the differences don't matter much for
# our use case here.
hashed = _hash_dict(key_parts)
# We want to nest the directories some to prevent having a ton of top
# level directories where we might run out of sub directories on some
# FS.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
return parts
def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
can_not_cache = not self.cache_dir or not canonical_package_name or not link
if can_not_cache:
return []
candidates = []
path = self.get_path_for_link(link)
if os.path.isdir(path):
for candidate in os.listdir(path):
candidates.append((candidate, path))
return candidates
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached items in for link."""
raise NotImplementedError()
def get(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
) -> Link:
"""Returns a link to a cached item if it exists, otherwise returns the
passed link.
"""
raise NotImplementedError()
class SimpleWheelCache(Cache):
"""A cache of wheels for future installs."""
def __init__(self, cache_dir: str) -> None:
super().__init__(cache_dir)
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached wheels for link
Because there are M wheels for any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were
not unique. E.g. ./package might have dozens of installs done for it
and build a version of 0.0...and if we built and cached a wheel, we'd
end up using the same wheel even if the source has been edited.
:param link: The link of the sdist for which this will cache wheels.
"""
parts = self._get_cache_path_parts(link)
assert self.cache_dir
# Store wheels within the root cache_dir
return os.path.join(self.cache_dir, "wheels", *parts)
def get(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
) -> Link:
candidates = []
if not package_name:
return link
canonical_package_name = canonicalize_name(package_name)
for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
try:
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
continue
if canonicalize_name(wheel.name) != canonical_package_name:
logger.debug(
"Ignoring cached wheel %s for %s as it "
"does not match the expected distribution name %s.",
wheel_name,
link,
package_name,
)
continue
if not wheel.supported(supported_tags):
# Built for a different python/arch/etc
continue
candidates.append(
(
wheel.support_index_min(supported_tags),
wheel_name,
wheel_dir,
)
)
if not candidates:
return link
_, wheel_name, wheel_dir = min(candidates)
return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
class EphemWheelCache(SimpleWheelCache):
"""A SimpleWheelCache that creates it's own temporary cache directory"""
def __init__(self) -> None:
self._temp_dir = TempDirectory(
kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
globally_managed=True,
)
super().__init__(self._temp_dir.path)
class CacheEntry:
def __init__(
self,
link: Link,
persistent: bool,
):
self.link = link
self.persistent = persistent
self.origin: Optional[DirectUrl] = None
origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
if origin_direct_url_path.exists():
try:
self.origin = DirectUrl.from_json(
origin_direct_url_path.read_text(encoding="utf-8")
)
except Exception as e:
logger.warning(
"Ignoring invalid cache entry origin file %s for %s (%s)",
origin_direct_url_path,
link.filename,
e,
)
class WheelCache(Cache):
"""Wraps EphemWheelCache and SimpleWheelCache into a single Cache
This Cache allows for graceful degradation, falling back to the ephem wheel
cache when a link is not found in the simple wheel cache first.
"""
def __init__(self, cache_dir: str) -> None:
super().__init__(cache_dir)
self._wheel_cache = SimpleWheelCache(cache_dir)
self._ephem_cache = EphemWheelCache()
def get_path_for_link(self, link: Link) -> str:
return self._wheel_cache.get_path_for_link(link)
def get_ephem_path_for_link(self, link: Link) -> str:
return self._ephem_cache.get_path_for_link(link)
def get(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
) -> Link:
cache_entry = self.get_cache_entry(link, package_name, supported_tags)
if cache_entry is None:
return link
return cache_entry.link
def get_cache_entry(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
) -> Optional[CacheEntry]:
"""Returns a CacheEntry with a link to a cached item if it exists or
None. The cache entry indicates if the item was found in the persistent
or ephemeral cache.
"""
retval = self._wheel_cache.get(
link=link,
package_name=package_name,
supported_tags=supported_tags,
)
if retval is not link:
return CacheEntry(retval, persistent=True)
retval = self._ephem_cache.get(
link=link,
package_name=package_name,
supported_tags=supported_tags,
)
if retval is not link:
return CacheEntry(retval, persistent=False)
return None
@staticmethod
def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
if origin_path.exists():
try:
origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
except Exception as e:
logger.warning(
"Could not read origin file %s in cache entry (%s). "
"Will attempt to overwrite it.",
origin_path,
e,
)
else:
# TODO: use DirectUrl.equivalent when
# https://github.com/pypa/pip/pull/10564 is merged.
if origin.url != download_info.url:
logger.warning(
"Origin URL %s in cache entry %s does not match download URL "
"%s. This is likely a pip bug or a cache corruption issue. "
"Will overwrite it with the new value.",
origin.url,
cache_dir,
download_info.url,
)
origin_path.write_text(download_info.to_json(), encoding="utf-8")
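# --- Illustrative sketch, not part of the vendored pip file above -------------
# How _get_cache_path_parts() derives nested directory names: hash the key
# material with sha224 and split the digest into 2/2/2/remainder chunks.
# The key values used here are made-up examples.
import hashlib
import json

key_parts = {
    "url": "https://example.invalid/pkg-1.0.tar.gz",  # hypothetical sdist URL
    "interpreter_name": "cp",
    "interpreter_version": "311",
}
serialized = json.dumps(key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
hashed = hashlib.sha224(serialized.encode("ascii")).hexdigest()
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
# e.g. cache path = <cache_dir>/wheels/<parts[0]>/<parts[1]>/<parts[2]>/<parts[3]>
print(parts)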

@ -0,0 +1,4 @@
"""Subpackage containing all of pip's command line interface related code
"""
# This file intentionally does not import submodules

@ -0,0 +1,171 @@
"""Logic that powers autocompletion installed by ``pip completion``.
"""
import optparse
import os
import sys
from itertools import chain
from typing import Any, Iterable, List, Optional
from pip._internal.cli.main_parser import create_main_parser
from pip._internal.commands import commands_dict, create_command
from pip._internal.metadata import get_default_environment
def autocomplete() -> None:
"""Entry Point for completion of main and subcommand options."""
# Don't complete if user hasn't sourced bash_completion file.
if "PIP_AUTO_COMPLETE" not in os.environ:
return
cwords = os.environ["COMP_WORDS"].split()[1:]
cword = int(os.environ["COMP_CWORD"])
try:
current = cwords[cword - 1]
except IndexError:
current = ""
parser = create_main_parser()
subcommands = list(commands_dict)
options = []
# subcommand
subcommand_name: Optional[str] = None
for word in cwords:
if word in subcommands:
subcommand_name = word
break
# subcommand options
if subcommand_name is not None:
# special case: 'help' subcommand has no options
if subcommand_name == "help":
sys.exit(1)
# special case: list locally installed dists for show and uninstall
should_list_installed = not current.startswith("-") and subcommand_name in [
"show",
"uninstall",
]
if should_list_installed:
env = get_default_environment()
lc = current.lower()
installed = [
dist.canonical_name
for dist in env.iter_installed_distributions(local_only=True)
if dist.canonical_name.startswith(lc)
and dist.canonical_name not in cwords[1:]
]
# if there are no dists installed, fall back to option completion
if installed:
for dist in installed:
print(dist)
sys.exit(1)
should_list_installables = (
not current.startswith("-") and subcommand_name == "install"
)
if should_list_installables:
for path in auto_complete_paths(current, "path"):
print(path)
sys.exit(1)
subcommand = create_command(subcommand_name)
for opt in subcommand.parser.option_list_all:
if opt.help != optparse.SUPPRESS_HELP:
for opt_str in opt._long_opts + opt._short_opts:
options.append((opt_str, opt.nargs))
# filter out previously specified options from available options
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
options = [(x, v) for (x, v) in options if x not in prev_opts]
# filter options by current input
options = [(k, v) for k, v in options if k.startswith(current)]
# get completion type given cwords and available subcommand options
completion_type = get_path_completion_type(
cwords,
cword,
subcommand.parser.option_list_all,
)
# get completion files and directories if ``completion_type`` is
# ``<file>``, ``<dir>`` or ``<path>``
if completion_type:
paths = auto_complete_paths(current, completion_type)
options = [(path, 0) for path in paths]
for option in options:
opt_label = option[0]
# append '=' to options which require args
if option[1] and option[0][:2] == "--":
opt_label += "="
print(opt_label)
else:
# show main parser options only when necessary
opts = [i.option_list for i in parser.option_groups]
opts.append(parser.option_list)
flattened_opts = chain.from_iterable(opts)
if current.startswith("-"):
for opt in flattened_opts:
if opt.help != optparse.SUPPRESS_HELP:
subcommands += opt._long_opts + opt._short_opts
else:
# get completion type given cwords and all available options
completion_type = get_path_completion_type(cwords, cword, flattened_opts)
if completion_type:
subcommands = list(auto_complete_paths(current, completion_type))
print(" ".join([x for x in subcommands if x.startswith(current)]))
sys.exit(1)
def get_path_completion_type(
cwords: List[str], cword: int, opts: Iterable[Any]
) -> Optional[str]:
"""Get the type of path completion (``file``, ``dir``, ``path`` or None)
:param cwords: same as the environment variable ``COMP_WORDS``
:param cword: same as the environment variable ``COMP_CWORD``
:param opts: The available options to check
:return: path completion type (``file``, ``dir``, ``path`` or None)
"""
if cword < 2 or not cwords[cword - 2].startswith("-"):
return None
for opt in opts:
if opt.help == optparse.SUPPRESS_HELP:
continue
for o in str(opt).split("/"):
if cwords[cword - 2].split("=")[0] == o:
if not opt.metavar or any(
x in ("path", "file", "dir") for x in opt.metavar.split("/")
):
return opt.metavar
return None
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
"""If ``completion_type`` is ``file`` or ``path``, list all regular files
and directories starting with ``current``; otherwise only list directories
starting with ``current``.
:param current: The word to be completed
:param completion_type: path completion type(``file``, ``path`` or ``dir``)
:return: A generator of regular files and/or directories
"""
directory, filename = os.path.split(current)
current_path = os.path.abspath(directory)
# Don't complete paths if they can't be accessed
if not os.access(current_path, os.R_OK):
return
filename = os.path.normcase(filename)
# list all files that start with ``filename``
file_list = (
x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
)
for f in file_list:
opt = os.path.join(current_path, f)
comp_file = os.path.normcase(os.path.join(directory, f))
# complete regular files when there is no ``<dir>`` after the option;
# complete directories when there is ``<file>``, ``<path>`` or
# ``<dir>`` after the option
if completion_type != "dir" and os.path.isfile(opt):
yield comp_file
elif os.path.isdir(opt):
yield os.path.join(comp_file, "")
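# --- Illustrative sketch, not part of the vendored pip file above -------------
# autocomplete() is driven entirely by environment variables exported by the
# shell completion script. This shows how the word being completed is recovered
# from them; the sample values are made up.
import os

os.environ["PIP_AUTO_COMPLETE"] = "1"
os.environ["COMP_WORDS"] = "pip install --pre"
os.environ["COMP_CWORD"] = "2"

cwords = os.environ["COMP_WORDS"].split()[1:]   # -> ["install", "--pre"]
cword = int(os.environ["COMP_CWORD"])
try:
    current = cwords[cword - 1]                 # -> "--pre", the word being completed
except IndexError:
    current = ""
print(current)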

@ -0,0 +1,236 @@
"""Base Command class, and related routines"""
import functools
import logging
import logging.config
import optparse
import os
import sys
import traceback
from optparse import Values
from typing import Any, Callable, List, Optional, Tuple
from pip._vendor.rich import traceback as rich_traceback
from pip._internal.cli import cmdoptions
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip._internal.cli.status_codes import (
ERROR,
PREVIOUS_BUILD_DIR_ERROR,
UNKNOWN_ERROR,
VIRTUALENV_NOT_FOUND,
)
from pip._internal.exceptions import (
BadCommand,
CommandError,
DiagnosticPipError,
InstallationError,
NetworkConnectionError,
PreviousBuildDirError,
UninstallationError,
)
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import get_prog, normalize_path
from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
from pip._internal.utils.virtualenv import running_under_virtualenv
__all__ = ["Command"]
logger = logging.getLogger(__name__)
class Command(CommandContextMixIn):
usage: str = ""
ignore_require_venv: bool = False
def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
super().__init__()
self.name = name
self.summary = summary
self.parser = ConfigOptionParser(
usage=self.usage,
prog=f"{get_prog()} {name}",
formatter=UpdatingDefaultsHelpFormatter(),
add_help_option=False,
name=name,
description=self.__doc__,
isolated=isolated,
)
self.tempdir_registry: Optional[TempDirRegistry] = None
# Commands should add options to this option group
optgroup_name = f"{self.name.capitalize()} Options"
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
gen_opts = cmdoptions.make_option_group(
cmdoptions.general_group,
self.parser,
)
self.parser.add_option_group(gen_opts)
self.add_options()
def add_options(self) -> None:
pass
def handle_pip_version_check(self, options: Values) -> None:
"""
This is a no-op so that commands by default do not do the pip version
check.
"""
# Make sure we do the pip version check if the index_group options
# are present.
assert not hasattr(options, "no_index")
def run(self, options: Values, args: List[str]) -> int:
raise NotImplementedError
def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
# factored out for testability
return self.parser.parse_args(args)
def main(self, args: List[str]) -> int:
try:
with self.main_context():
return self._main(args)
finally:
logging.shutdown()
def _main(self, args: List[str]) -> int:
# We must initialize this before the tempdir manager, otherwise the
# configuration would not be accessible by the time we clean up the
# tempdir manager.
self.tempdir_registry = self.enter_context(tempdir_registry())
# Intentionally set as early as possible so globally-managed temporary
# directories are available to the rest of the code.
self.enter_context(global_tempdir_manager())
options, args = self.parse_args(args)
# Set verbosity so that it can be used elsewhere.
self.verbosity = options.verbose - options.quiet
level_number = setup_logging(
verbosity=self.verbosity,
no_color=options.no_color,
user_log_file=options.log,
)
always_enabled_features = set(options.features_enabled) & set(
cmdoptions.ALWAYS_ENABLED_FEATURES
)
if always_enabled_features:
logger.warning(
"The following features are always enabled: %s. ",
", ".join(sorted(always_enabled_features)),
)
# Make sure that the --python argument isn't specified after the
# subcommand. We can tell, because if --python was specified,
# we should only reach this point if we're running in the created
# subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
# variable set.
if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
logger.critical(
"The --python option must be placed before the pip subcommand name"
)
sys.exit(ERROR)
# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.
if options.no_input:
os.environ["PIP_NO_INPUT"] = "1"
if options.exists_action:
os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
if options.require_venv and not self.ignore_require_venv:
# If a venv is required check if it can really be found
if not running_under_virtualenv():
logger.critical("Could not find an activated virtualenv (required).")
sys.exit(VIRTUALENV_NOT_FOUND)
if options.cache_dir:
options.cache_dir = normalize_path(options.cache_dir)
if not check_path_owner(options.cache_dir):
logger.warning(
"The directory '%s' or its parent directory is not owned "
"or is not writable by the current user. The cache "
"has been disabled. Check the permissions and owner of "
"that directory. If executing pip with sudo, you should "
"use sudo's -H flag.",
options.cache_dir,
)
options.cache_dir = None
def intercepts_unhandled_exc(
run_func: Callable[..., int]
) -> Callable[..., int]:
@functools.wraps(run_func)
def exc_logging_wrapper(*args: Any) -> int:
try:
status = run_func(*args)
assert isinstance(status, int)
return status
except DiagnosticPipError as exc:
logger.error("[present-rich] %s", exc)
logger.debug("Exception information:", exc_info=True)
return ERROR
except PreviousBuildDirError as exc:
logger.critical(str(exc))
logger.debug("Exception information:", exc_info=True)
return PREVIOUS_BUILD_DIR_ERROR
except (
InstallationError,
UninstallationError,
BadCommand,
NetworkConnectionError,
) as exc:
logger.critical(str(exc))
logger.debug("Exception information:", exc_info=True)
return ERROR
except CommandError as exc:
logger.critical("%s", exc)
logger.debug("Exception information:", exc_info=True)
return ERROR
except BrokenStdoutLoggingError:
# Bypass our logger and write any remaining messages to
# stderr because stdout no longer works.
print("ERROR: Pipe to stdout was broken", file=sys.stderr)
if level_number <= logging.DEBUG:
traceback.print_exc(file=sys.stderr)
return ERROR
except KeyboardInterrupt:
logger.critical("Operation cancelled by user")
logger.debug("Exception information:", exc_info=True)
return ERROR
except BaseException:
logger.critical("Exception:", exc_info=True)
return UNKNOWN_ERROR
return exc_logging_wrapper
try:
if not options.debug_mode:
run = intercepts_unhandled_exc(self.run)
else:
run = self.run
rich_traceback.install(show_locals=True)
return run(options, args)
finally:
self.handle_pip_version_check(options)
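# --- Illustrative sketch, not part of the vendored pip file above -------------
# intercepts_unhandled_exc() is a decorator that turns exceptions raised by a
# command's run() into process exit codes. A reduced version with made-up
# status constants:
import functools
import logging
from typing import Any, Callable

log = logging.getLogger(__name__)
SUCCESS, ERROR = 0, 1  # illustrative status codes, not pip's real ones

def exceptions_to_status(run_func: Callable[..., int]) -> Callable[..., int]:
    @functools.wraps(run_func)
    def wrapper(*args: Any) -> int:
        try:
            return run_func(*args)
        except KeyboardInterrupt:
            log.critical("Operation cancelled by user")
            return ERROR
        except Exception:
            log.critical("Exception:", exc_info=True)
            return ERROR
    return wrapper

@exceptions_to_status
def run() -> int:
    return SUCCESS

print(run())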

File diff suppressed because it is too large

@ -0,0 +1,27 @@
from contextlib import ExitStack, contextmanager
from typing import ContextManager, Generator, TypeVar
_T = TypeVar("_T", covariant=True)
class CommandContextMixIn:
def __init__(self) -> None:
super().__init__()
self._in_main_context = False
self._main_context = ExitStack()
@contextmanager
def main_context(self) -> Generator[None, None, None]:
assert not self._in_main_context
self._in_main_context = True
try:
with self._main_context:
yield
finally:
self._in_main_context = False
def enter_context(self, context_provider: ContextManager[_T]) -> _T:
assert self._in_main_context
return self._main_context.enter_context(context_provider)
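# --- Illustrative sketch, not part of the vendored pip file above -------------
# CommandContextMixIn funnels every context manager a command opens into one
# ExitStack, so they all unwind together when main_context() exits. The same
# guarantee in miniature:
import tempfile
from contextlib import ExitStack

stack = ExitStack()
with stack:
    # Everything entered on the stack is closed, in reverse order, when this
    # with-block ends.
    tmp = stack.enter_context(tempfile.TemporaryDirectory())
    print("working in", tmp)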

@ -0,0 +1,79 @@
"""Primary application entrypoint.
"""
import locale
import logging
import os
import sys
import warnings
from typing import List, Optional
from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import create_command
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation
logger = logging.getLogger(__name__)
# Do not import and use main() directly! Using it directly is actively
# discouraged by pip's maintainers. The name, location and behavior of
# this function is subject to change, so calling it directly is not
# portable across different pip versions.
# In addition, running pip in-process is unsupported and unsafe. This is
# elaborated in detail at
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
# That document also provides suggestions that should work for nearly
# all users that are considering importing and using main() directly.
# However, we know that certain users will still want to invoke pip
# in-process. If you understand and accept the implications of using pip
# in an unsupported manner, the best approach is to use runpy to avoid
# depending on the exact location of this entry point.
# The following example shows how to use runpy to invoke pip in that
# case:
#
# sys.argv = ["pip", your, args, here]
# runpy.run_module("pip", run_name="__main__")
#
# Note that this will exit the process after running, unlike a direct
# call to main. As it is not safe to do any processing after calling
# main, this should not be an issue in practice.
def main(args: Optional[List[str]] = None) -> int:
if args is None:
args = sys.argv[1:]
# Suppress the pkg_resources deprecation warning
# Note - we use a module of .*pkg_resources to cover
# the normal case (pip._vendor.pkg_resources) and the
# devendored case (a bare pkg_resources)
warnings.filterwarnings(
action="ignore", category=DeprecationWarning, module=".*pkg_resources"
)
# Configure our deprecation warnings to be sent through loggers
deprecation.install_warning_logger()
autocomplete()
try:
cmd_name, cmd_args = parse_command(args)
except PipError as exc:
sys.stderr.write(f"ERROR: {exc}")
sys.stderr.write(os.linesep)
sys.exit(1)
# Needed for locale.getpreferredencoding(False) to work
# in pip._internal.utils.encoding.auto_decode
try:
locale.setlocale(locale.LC_ALL, "")
except locale.Error as e:
# setlocale can apparently crash if locales are uninitialized
logger.debug("Ignoring error %s when setting locale", e)
command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
return command.main(cmd_args)
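# --- Illustrative sketch, not part of the vendored pip file above -------------
# The runpy invocation recommended in the comment block above, spelled out.
# Note that pip calls sys.exit() when it finishes, so run_module() ends the
# process and nothing after it runs. The arguments are examples only.
import runpy
import sys

sys.argv = ["pip", "--version"]
runpy.run_module("pip", run_name="__main__")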

@ -0,0 +1,134 @@
"""A single place for constructing and exposing the main parser
"""
import os
import subprocess
import sys
from typing import List, Optional, Tuple
from pip._internal.build_env import get_runnable_pip
from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip._internal.commands import commands_dict, get_similar_commands
from pip._internal.exceptions import CommandError
from pip._internal.utils.misc import get_pip_version, get_prog
__all__ = ["create_main_parser", "parse_command"]
def create_main_parser() -> ConfigOptionParser:
"""Creates and returns the main parser for pip's CLI"""
parser = ConfigOptionParser(
usage="\n%prog <command> [options]",
add_help_option=False,
formatter=UpdatingDefaultsHelpFormatter(),
name="global",
prog=get_prog(),
)
parser.disable_interspersed_args()
parser.version = get_pip_version()
# add the general options
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
parser.add_option_group(gen_opts)
# so the help formatter knows
parser.main = True # type: ignore
# create command listing for description
description = [""] + [
f"{name:27} {command_info.summary}"
for name, command_info in commands_dict.items()
]
parser.description = "\n".join(description)
return parser
def identify_python_interpreter(python: str) -> Optional[str]:
# If the named file exists, use it.
# If it's a directory, assume it's a virtual environment and
# look for the environment's Python executable.
if os.path.exists(python):
if os.path.isdir(python):
# bin/python for Unix, Scripts/python.exe for Windows
# Try both in case of odd cases like cygwin.
for exe in ("bin/python", "Scripts/python.exe"):
py = os.path.join(python, exe)
if os.path.exists(py):
return py
else:
return python
# Could not find the interpreter specified
return None
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
parser = create_main_parser()
# Note: parser calls disable_interspersed_args(), so the result of this
# call is to split the initial args into the general options before the
# subcommand and everything else.
# For example:
# args: ['--timeout=5', 'install', '--user', 'INITools']
# general_options: ['--timeout=5']
# args_else: ['install', '--user', 'INITools']
general_options, args_else = parser.parse_args(args)
# --python
if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
# Re-invoke pip using the specified Python interpreter
interpreter = identify_python_interpreter(general_options.python)
if interpreter is None:
raise CommandError(
f"Could not locate Python interpreter {general_options.python}"
)
pip_cmd = [
interpreter,
get_runnable_pip(),
]
pip_cmd.extend(args)
# Set a flag so the child doesn't re-invoke itself, causing
# an infinite loop.
os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
returncode = 0
try:
proc = subprocess.run(pip_cmd)
returncode = proc.returncode
except (subprocess.SubprocessError, OSError) as exc:
raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
sys.exit(returncode)
# --version
if general_options.version:
sys.stdout.write(parser.version)
sys.stdout.write(os.linesep)
sys.exit()
# pip || pip help -> print_help()
if not args_else or (args_else[0] == "help" and len(args_else) == 1):
parser.print_help()
sys.exit()
# the subcommand name
cmd_name = args_else[0]
if cmd_name not in commands_dict:
guess = get_similar_commands(cmd_name)
msg = [f'unknown command "{cmd_name}"']
if guess:
msg.append(f'maybe you meant "{guess}"')
raise CommandError(" - ".join(msg))
# all the args without the subcommand
cmd_args = args[:]
cmd_args.remove(cmd_name)
return cmd_name, cmd_args
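# --- Illustrative sketch, not part of the vendored pip file above -------------
# What identify_python_interpreter() resolves for a virtual environment
# directory: the interpreter under bin/ (POSIX) or Scripts/ (Windows), probed in
# the same order. The venv path used here is a hypothetical example.
import os
from typing import Optional

def find_venv_python(venv_dir: str) -> Optional[str]:
    for exe in ("bin/python", "Scripts/python.exe"):
        candidate = os.path.join(venv_dir, exe)
        if os.path.exists(candidate):
            return candidate
    return None  # no interpreter found under the given directory

print(find_venv_python(os.path.expanduser("~/.venvs/demo")))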

@ -0,0 +1,294 @@
"""Base option parser setup"""
import logging
import optparse
import shutil
import sys
import textwrap
from contextlib import suppress
from typing import Any, Dict, Generator, List, Tuple
from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
from pip._internal.utils.misc import redact_auth_from_url, strtobool
logger = logging.getLogger(__name__)
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
"""A prettier/less verbose help formatter for optparse."""
def __init__(self, *args: Any, **kwargs: Any) -> None:
# help position must be aligned with __init__.parseopts.description
kwargs["max_help_position"] = 30
kwargs["indent_increment"] = 1
kwargs["width"] = shutil.get_terminal_size()[0] - 2
super().__init__(*args, **kwargs)
def format_option_strings(self, option: optparse.Option) -> str:
return self._format_option_strings(option)
def _format_option_strings(
self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
) -> str:
"""
Return a comma-separated list of option strings and metavars.
:param option: tuple of (short opt, long opt), e.g. ('-f', '--format')
:param mvarfmt: metavar format string
:param optsep: separator
"""
opts = []
if option._short_opts:
opts.append(option._short_opts[0])
if option._long_opts:
opts.append(option._long_opts[0])
if len(opts) > 1:
opts.insert(1, optsep)
if option.takes_value():
assert option.dest is not None
metavar = option.metavar or option.dest.lower()
opts.append(mvarfmt.format(metavar.lower()))
return "".join(opts)
def format_heading(self, heading: str) -> str:
if heading == "Options":
return ""
return heading + ":\n"
def format_usage(self, usage: str) -> str:
"""
Ensure there is only one newline between usage and the first heading
if there is no description.
"""
msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
return msg
def format_description(self, description: str) -> str:
# leave full control over description to us
if description:
if hasattr(self.parser, "main"):
label = "Commands"
else:
label = "Description"
# some doc strings have initial newlines, some don't
description = description.lstrip("\n")
# some doc strings have final newlines and spaces, some don't
description = description.rstrip()
# dedent, then reindent
description = self.indent_lines(textwrap.dedent(description), " ")
description = f"{label}:\n{description}\n"
return description
else:
return ""
def format_epilog(self, epilog: str) -> str:
# leave full control over epilog to us
if epilog:
return epilog
else:
return ""
def indent_lines(self, text: str, indent: str) -> str:
new_lines = [indent + line for line in text.split("\n")]
return "\n".join(new_lines)
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
"""Custom help formatter for use in ConfigOptionParser.
This updates the defaults before expanding them, allowing
them to show up correctly in the help listing.
It also redacts auth from URL-type options.
"""
def expand_default(self, option: optparse.Option) -> str:
default_values = None
if self.parser is not None:
assert isinstance(self.parser, ConfigOptionParser)
self.parser._update_defaults(self.parser.defaults)
assert option.dest is not None
default_values = self.parser.defaults.get(option.dest)
help_text = super().expand_default(option)
if default_values and option.metavar == "URL":
if isinstance(default_values, str):
default_values = [default_values]
# If it's not a list, we should abort and just return the help text
if not isinstance(default_values, list):
default_values = []
for val in default_values:
help_text = help_text.replace(val, redact_auth_from_url(val))
return help_text
class CustomOptionParser(optparse.OptionParser):
def insert_option_group(
self, idx: int, *args: Any, **kwargs: Any
) -> optparse.OptionGroup:
"""Insert an OptionGroup at a given position."""
group = self.add_option_group(*args, **kwargs)
self.option_groups.pop()
self.option_groups.insert(idx, group)
return group
@property
def option_list_all(self) -> List[optparse.Option]:
"""Get a list of all options, including those in option groups."""
res = self.option_list[:]
for i in self.option_groups:
res.extend(i.option_list)
return res
class ConfigOptionParser(CustomOptionParser):
"""Custom option parser which updates its defaults by checking the
configuration files and environmental variables"""
def __init__(
self,
*args: Any,
name: str,
isolated: bool = False,
**kwargs: Any,
) -> None:
self.name = name
self.config = Configuration(isolated)
assert self.name
super().__init__(*args, **kwargs)
def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
try:
return option.check_value(key, val)
except optparse.OptionValueError as exc:
print(f"An error occurred during configuration: {exc}")
sys.exit(3)
def _get_ordered_configuration_items(
self,
) -> Generator[Tuple[str, Any], None, None]:
# Configuration gives keys in an unordered manner. Order them.
override_order = ["global", self.name, ":env:"]
# Pool the options into different groups
section_items: Dict[str, List[Tuple[str, Any]]] = {
name: [] for name in override_order
}
for section_key, val in self.config.items():
# ignore empty values
if not val:
logger.debug(
"Ignoring configuration key '%s' as it's value is empty.",
section_key,
)
continue
section, key = section_key.split(".", 1)
if section in override_order:
section_items[section].append((key, val))
# Yield each group in their override order
for section in override_order:
for key, val in section_items[section]:
yield key, val
def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
"""Updates the given defaults with values from the config files and
the environ. Does a little special handling for certain types of
options (lists)."""
# Accumulate complex default state.
self.values = optparse.Values(self.defaults)
late_eval = set()
# Then set the options with those values
for key, val in self._get_ordered_configuration_items():
# '--' because configuration supports only long names
option = self.get_option("--" + key)
# Ignore options not present in this parser. E.g. non-globals put
# in [global] by users that want them to apply to all applicable
# commands.
if option is None:
continue
assert option.dest is not None
if option.action in ("store_true", "store_false"):
try:
val = strtobool(val)
except ValueError:
self.error(
"{} is not a valid value for {} option, " # noqa
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
)
elif option.action == "count":
with suppress(ValueError):
val = strtobool(val)
with suppress(ValueError):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
"{} is not a valid value for {} option, " # noqa
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)
)
elif option.action == "append":
val = val.split()
val = [self.check_default(option, key, v) for v in val]
elif option.action == "callback":
assert option.callback is not None
late_eval.add(option.dest)
opt_str = option.get_opt_string()
val = option.convert_value(opt_str, val)
# From take_action
args = option.callback_args or ()
kwargs = option.callback_kwargs or {}
option.callback(option, opt_str, val, self, *args, **kwargs)
else:
val = self.check_default(option, key, val)
defaults[option.dest] = val
for key in late_eval:
defaults[key] = getattr(self.values, key)
self.values = None
return defaults
def get_default_values(self) -> optparse.Values:
"""Overriding to make updating the defaults after instantiation of
the option parser possible, _update_defaults() does the dirty work."""
if not self.process_default_values:
# Old, pre-Optik 1.5 behaviour.
return optparse.Values(self.defaults)
# Load the configuration, or error out in case of an error
try:
self.config.load()
except ConfigurationError as err:
self.exit(UNKNOWN_ERROR, str(err))
defaults = self._update_defaults(self.defaults.copy()) # ours
for option in self._get_all_options():
assert option.dest is not None
default = defaults.get(option.dest)
if isinstance(default, str):
opt_str = option.get_opt_string()
defaults[option.dest] = option.check_value(opt_str, default)
return optparse.Values(defaults)
def error(self, msg: str) -> None:
self.print_usage(sys.stderr)
self.exit(UNKNOWN_ERROR, f"{msg}\n")

@ -0,0 +1,68 @@
import functools
from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
from pip._vendor.rich.progress import (
BarColumn,
DownloadColumn,
FileSizeColumn,
Progress,
ProgressColumn,
SpinnerColumn,
TextColumn,
TimeElapsedColumn,
TimeRemainingColumn,
TransferSpeedColumn,
)
from pip._internal.utils.logging import get_indentation
DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
def _rich_progress_bar(
iterable: Iterable[bytes],
*,
bar_type: str,
size: int,
) -> Generator[bytes, None, None]:
assert bar_type == "on", "This should only be used in the default mode."
if not size:
total = float("inf")
columns: Tuple[ProgressColumn, ...] = (
TextColumn("[progress.description]{task.description}"),
SpinnerColumn("line", speed=1.5),
FileSizeColumn(),
TransferSpeedColumn(),
TimeElapsedColumn(),
)
else:
total = size
columns = (
TextColumn("[progress.description]{task.description}"),
BarColumn(),
DownloadColumn(),
TransferSpeedColumn(),
TextColumn("eta"),
TimeRemainingColumn(),
)
progress = Progress(*columns, refresh_per_second=30)
task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
with progress:
for chunk in iterable:
yield chunk
progress.update(task_id, advance=len(chunk))
def get_download_progress_renderer(
*, bar_type: str, size: Optional[int] = None
) -> DownloadProgressRenderer:
"""Get an object that can be used to render the download progress.
Returns a callable, that takes an iterable to "wrap".
"""
if bar_type == "on":
return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
else:
return iter # no-op, when passed an iterator
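# --- Illustrative sketch, not part of the vendored pip file above -------------
# The pattern used by _rich_progress_bar(): wrap a byte iterator so every chunk
# advances a rich progress bar. This uses the upstream "rich" package rather
# than pip's vendored copy; the chunk sizes and total are made up.
from typing import Iterable, Iterator

from rich.progress import BarColumn, DownloadColumn, Progress, TextColumn

def with_progress(chunks: Iterable[bytes], total: int) -> Iterator[bytes]:
    progress = Progress(
        TextColumn("[progress.description]{task.description}"),
        BarColumn(),
        DownloadColumn(),
        refresh_per_second=30,
    )
    task_id = progress.add_task("downloading", total=total)
    with progress:
        for chunk in chunks:
            yield chunk
            progress.update(task_id, advance=len(chunk))

data = (b"x" * 1024 for _ in range(8))  # stand-in for downloaded chunks
for _ in with_progress(data, total=8 * 1024):
    pass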

@ -0,0 +1,508 @@
"""Contains the Command base classes that depend on PipSession.
The classes in this module are kept separate so that the commands not
needing download / PackageFinder capability don't unnecessarily import the
PackageFinder machinery and all its vendored dependencies, etc.
"""
import logging
import os
import sys
from functools import partial
from optparse import Values
from typing import TYPE_CHECKING, Any, List, Optional, Tuple
from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.models.target_python import TargetPython
from pip._internal.network.session import PipSession
from pip._internal.operations.build.build_tracker import BuildTracker
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import (
install_req_from_editable,
install_req_from_line,
install_req_from_parsed_requirement,
install_req_from_req_string,
)
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_install import InstallRequirement
from pip._internal.resolution.base import BaseResolver
from pip._internal.self_outdated_check import pip_self_version_check
from pip._internal.utils.temp_dir import (
TempDirectory,
TempDirectoryTypeRegistry,
tempdir_kinds,
)
from pip._internal.utils.virtualenv import running_under_virtualenv
if TYPE_CHECKING:
from ssl import SSLContext
logger = logging.getLogger(__name__)
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
if sys.version_info < (3, 10):
raise CommandError("The truststore feature is only available for Python 3.10+")
try:
import ssl
except ImportError:
logger.warning("Disabling truststore since ssl support is missing")
return None
try:
import truststore
except ImportError:
raise CommandError(
"To use the truststore feature, 'truststore' must be installed into "
"pip's current environment."
)
return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
class SessionCommandMixin(CommandContextMixIn):
"""
A class mixin for command classes needing _build_session().
"""
def __init__(self) -> None:
super().__init__()
self._session: Optional[PipSession] = None
@classmethod
def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
"""Return a list of index urls from user-provided options."""
index_urls = []
if not getattr(options, "no_index", False):
url = getattr(options, "index_url", None)
if url:
index_urls.append(url)
urls = getattr(options, "extra_index_urls", None)
if urls:
index_urls.extend(urls)
# Return None rather than an empty list
return index_urls or None
def get_default_session(self, options: Values) -> PipSession:
"""Get a default-managed session."""
if self._session is None:
self._session = self.enter_context(self._build_session(options))
# there's no type annotation on requests.Session, so it's
# automatically ContextManager[Any] and self._session becomes Any,
# then https://github.com/python/mypy/issues/7696 kicks in
assert self._session is not None
return self._session
def _build_session(
self,
options: Values,
retries: Optional[int] = None,
timeout: Optional[int] = None,
fallback_to_certifi: bool = False,
) -> PipSession:
cache_dir = options.cache_dir
assert not cache_dir or os.path.isabs(cache_dir)
if "truststore" in options.features_enabled:
try:
ssl_context = _create_truststore_ssl_context()
except Exception:
if not fallback_to_certifi:
raise
ssl_context = None
else:
ssl_context = None
session = PipSession(
cache=os.path.join(cache_dir, "http") if cache_dir else None,
retries=retries if retries is not None else options.retries,
trusted_hosts=options.trusted_hosts,
index_urls=self._get_index_urls(options),
ssl_context=ssl_context,
)
# Handle custom ca-bundles from the user
if options.cert:
session.verify = options.cert
# Handle SSL client certificate
if options.client_cert:
session.cert = options.client_cert
# Handle timeouts
if options.timeout or timeout:
session.timeout = timeout if timeout is not None else options.timeout
# Handle configured proxies
if options.proxy:
session.proxies = {
"http": options.proxy,
"https": options.proxy,
}
# Determine if we can prompt the user for authentication or not
session.auth.prompting = not options.no_input
session.auth.keyring_provider = options.keyring_provider
return session
class IndexGroupCommand(Command, SessionCommandMixin):
"""
Abstract base class for commands with the index_group options.
This also corresponds to the commands that permit the pip version check.
"""
def handle_pip_version_check(self, options: Values) -> None:
"""
Do the pip version check if not disabled.
This overrides the default behavior of not doing the check.
"""
# Make sure the index_group options are present.
assert hasattr(options, "no_index")
if options.disable_pip_version_check or options.no_index:
return
# Otherwise, check if we're using the latest version of pip available.
session = self._build_session(
options,
retries=0,
timeout=min(5, options.timeout),
# This is set to ensure the function does not fail when truststore is
# specified in use-feature but cannot be loaded. This usually raises a
# CommandError and shows a nice user-facing error, but this function is not
# called in that try-except block.
fallback_to_certifi=True,
)
with session:
pip_self_version_check(session, options)
KEEPABLE_TEMPDIR_TYPES = [
tempdir_kinds.BUILD_ENV,
tempdir_kinds.EPHEM_WHEEL_CACHE,
tempdir_kinds.REQ_BUILD,
]
def warn_if_run_as_root() -> None:
"""Output a warning for sudo users on Unix.
In a virtual environment, sudo pip still writes to virtualenv.
On Windows, users may run pip as Administrator without issues.
This warning only applies to Unix root users outside of virtualenv.
"""
if running_under_virtualenv():
return
if not hasattr(os, "getuid"):
return
# On Windows, there are no "system managed" Python packages. Installing as
# Administrator via pip is the correct way of updating system environments.
#
# We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
# checks: https://mypy.readthedocs.io/en/stable/common_issues.html
if sys.platform == "win32" or sys.platform == "cygwin":
return
if os.getuid() != 0:
return
logger.warning(
"Running pip as the 'root' user can result in broken permissions and "
"conflicting behaviour with the system package manager. "
"It is recommended to use a virtual environment instead: "
"https://pip.pypa.io/warnings/venv"
)
def with_cleanup(func: Any) -> Any:
"""Decorator for common logic related to managing temporary
directories.
"""
def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
for t in KEEPABLE_TEMPDIR_TYPES:
registry.set_delete(t, False)
def wrapper(
self: RequirementCommand, options: Values, args: List[Any]
) -> Optional[int]:
assert self.tempdir_registry is not None
if options.no_clean:
configure_tempdir_registry(self.tempdir_registry)
try:
return func(self, options, args)
except PreviousBuildDirError:
# This kind of conflict can occur when the user passes an explicit
# build directory with a pre-existing folder. In that case we do
# not want to accidentally remove it.
configure_tempdir_registry(self.tempdir_registry)
raise
return wrapper
class RequirementCommand(IndexGroupCommand):
def __init__(self, *args: Any, **kw: Any) -> None:
super().__init__(*args, **kw)
self.cmd_opts.add_option(cmdoptions.no_clean())
@staticmethod
def determine_resolver_variant(options: Values) -> str:
"""Determines which resolver should be used, based on the given options."""
if "legacy-resolver" in options.deprecated_features_enabled:
return "legacy"
return "2020-resolver"
@classmethod
def make_requirement_preparer(
cls,
temp_build_dir: TempDirectory,
options: Values,
build_tracker: BuildTracker,
session: PipSession,
finder: PackageFinder,
use_user_site: bool,
download_dir: Optional[str] = None,
verbosity: int = 0,
) -> RequirementPreparer:
"""
Create a RequirementPreparer instance for the given parameters.
"""
temp_build_dir_path = temp_build_dir.path
assert temp_build_dir_path is not None
legacy_resolver = False
resolver_variant = cls.determine_resolver_variant(options)
if resolver_variant == "2020-resolver":
lazy_wheel = "fast-deps" in options.features_enabled
if lazy_wheel:
logger.warning(
"pip is using lazily downloaded wheels using HTTP "
"range requests to obtain dependency information. "
"This experimental feature is enabled through "
"--use-feature=fast-deps and it is not ready for "
"production."
)
else:
legacy_resolver = True
lazy_wheel = False
if "fast-deps" in options.features_enabled:
logger.warning(
"fast-deps has no effect when used with the legacy resolver."
)
return RequirementPreparer(
build_dir=temp_build_dir_path,
src_dir=options.src_dir,
download_dir=download_dir,
build_isolation=options.build_isolation,
check_build_deps=options.check_build_deps,
build_tracker=build_tracker,
session=session,
progress_bar=options.progress_bar,
finder=finder,
require_hashes=options.require_hashes,
use_user_site=use_user_site,
lazy_wheel=lazy_wheel,
verbosity=verbosity,
legacy_resolver=legacy_resolver,
)
@classmethod
def make_resolver(
cls,
preparer: RequirementPreparer,
finder: PackageFinder,
options: Values,
wheel_cache: Optional[WheelCache] = None,
use_user_site: bool = False,
ignore_installed: bool = True,
ignore_requires_python: bool = False,
force_reinstall: bool = False,
upgrade_strategy: str = "to-satisfy-only",
use_pep517: Optional[bool] = None,
py_version_info: Optional[Tuple[int, ...]] = None,
) -> BaseResolver:
"""
Create a Resolver instance for the given parameters.
"""
make_install_req = partial(
install_req_from_req_string,
isolated=options.isolated_mode,
use_pep517=use_pep517,
)
resolver_variant = cls.determine_resolver_variant(options)
# The long import name and duplicated invocation are needed to convince
# Mypy into typechecking this correctly. Otherwise it would complain about
# the "Resolver" class being redefined.
if resolver_variant == "2020-resolver":
import pip._internal.resolution.resolvelib.resolver
return pip._internal.resolution.resolvelib.resolver.Resolver(
preparer=preparer,
finder=finder,
wheel_cache=wheel_cache,
make_install_req=make_install_req,
use_user_site=use_user_site,
ignore_dependencies=options.ignore_dependencies,
ignore_installed=ignore_installed,
ignore_requires_python=ignore_requires_python,
force_reinstall=force_reinstall,
upgrade_strategy=upgrade_strategy,
py_version_info=py_version_info,
)
import pip._internal.resolution.legacy.resolver
return pip._internal.resolution.legacy.resolver.Resolver(
preparer=preparer,
finder=finder,
wheel_cache=wheel_cache,
make_install_req=make_install_req,
use_user_site=use_user_site,
ignore_dependencies=options.ignore_dependencies,
ignore_installed=ignore_installed,
ignore_requires_python=ignore_requires_python,
force_reinstall=force_reinstall,
upgrade_strategy=upgrade_strategy,
py_version_info=py_version_info,
)
def get_requirements(
self,
args: List[str],
options: Values,
finder: PackageFinder,
session: PipSession,
) -> List[InstallRequirement]:
"""
Parse command-line arguments into the corresponding requirements.
"""
requirements: List[InstallRequirement] = []
for filename in options.constraints:
for parsed_req in parse_requirements(
filename,
constraint=True,
finder=finder,
options=options,
session=session,
):
req_to_add = install_req_from_parsed_requirement(
parsed_req,
isolated=options.isolated_mode,
user_supplied=False,
)
requirements.append(req_to_add)
for req in args:
req_to_add = install_req_from_line(
req,
comes_from=None,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
config_settings=getattr(options, "config_settings", None),
)
requirements.append(req_to_add)
for req in options.editables:
req_to_add = install_req_from_editable(
req,
user_supplied=True,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
config_settings=getattr(options, "config_settings", None),
)
requirements.append(req_to_add)
# NOTE: options.require_hashes may be set if --require-hashes is True
for filename in options.requirements:
for parsed_req in parse_requirements(
filename, finder=finder, options=options, session=session
):
req_to_add = install_req_from_parsed_requirement(
parsed_req,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
config_settings=parsed_req.options.get("config_settings")
if parsed_req.options
else None,
)
requirements.append(req_to_add)
# If any requirement has hash options, enable hash checking.
if any(req.has_hash_options for req in requirements):
options.require_hashes = True
if not (args or options.editables or options.requirements):
opts = {"name": self.name}
if options.find_links:
raise CommandError(
"You must give at least one requirement to {name} "
'(maybe you meant "pip {name} {links}"?)'.format(
**dict(opts, links=" ".join(options.find_links))
)
)
else:
raise CommandError(
"You must give at least one requirement to {name} "
'(see "pip help {name}")'.format(**opts)
)
return requirements
@staticmethod
def trace_basic_info(finder: PackageFinder) -> None:
"""
Trace basic information about the provided objects.
"""
# Display where finder is looking for packages
search_scope = finder.search_scope
locations = search_scope.get_formatted_locations()
if locations:
logger.info(locations)
def _build_package_finder(
self,
options: Values,
session: PipSession,
target_python: Optional[TargetPython] = None,
ignore_requires_python: Optional[bool] = None,
) -> PackageFinder:
"""
Create a package finder appropriate to this requirement command.
:param ignore_requires_python: Whether to ignore incompatible
"Requires-Python" values in links. Defaults to False.
"""
link_collector = LinkCollector.create(session, options=options)
selection_prefs = SelectionPreferences(
allow_yanked=True,
format_control=options.format_control,
allow_all_prereleases=options.pre,
prefer_binary=options.prefer_binary,
ignore_requires_python=ignore_requires_python,
)
return PackageFinder.create(
link_collector=link_collector,
selection_prefs=selection_prefs,
target_python=target_python,
)
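# --- Illustrative sketch, not part of this diff ------------------------------
# Rough shape of how an install-style command built on this base class might
# chain the helpers above: build a finder, parse the requirements, then hand
# both to a resolver. The `preparer` parameter stands in for the
# RequirementPreparer the real command constructs separately, and the exact
# keyword arguments are assumptions, not the upstream implementation.
def resolve_for_install(self, options, args, session, preparer):
    finder = self._build_package_finder(options=options, session=session)
    reqs = self.get_requirements(args, options, finder, session)
    resolver = self.make_resolver(
        preparer=preparer,
        finder=finder,
        options=options,
        ignore_installed=options.ignore_installed,
        use_pep517=options.use_pep517,
    )
    # make_resolver already picked the legacy or 2020 resolver variant;
    # callers only see the common BaseResolver interface.
    return resolver.resolve(reqs, check_supported_wheels=True)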

@ -0,0 +1,159 @@
import contextlib
import itertools
import logging
import sys
import time
from typing import IO, Generator, Optional
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
logger = logging.getLogger(__name__)
class SpinnerInterface:
def spin(self) -> None:
raise NotImplementedError()
def finish(self, final_status: str) -> None:
raise NotImplementedError()
class InteractiveSpinner(SpinnerInterface):
def __init__(
self,
message: str,
file: Optional[IO[str]] = None,
spin_chars: str = "-\\|/",
# Empirically, 8 updates/second looks nice
min_update_interval_seconds: float = 0.125,
):
self._message = message
if file is None:
file = sys.stdout
self._file = file
self._rate_limiter = RateLimiter(min_update_interval_seconds)
self._finished = False
self._spin_cycle = itertools.cycle(spin_chars)
self._file.write(" " * get_indentation() + self._message + " ... ")
self._width = 0
def _write(self, status: str) -> None:
assert not self._finished
# Erase what we wrote before by backspacing to the beginning, writing
# spaces to overwrite the old text, and then backspacing again
backup = "\b" * self._width
self._file.write(backup + " " * self._width + backup)
# Now we have a blank slate to add our status
self._file.write(status)
self._width = len(status)
self._file.flush()
self._rate_limiter.reset()
def spin(self) -> None:
if self._finished:
return
if not self._rate_limiter.ready():
return
self._write(next(self._spin_cycle))
def finish(self, final_status: str) -> None:
if self._finished:
return
self._write(final_status)
self._file.write("\n")
self._file.flush()
self._finished = True
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
self._message = message
self._finished = False
self._rate_limiter = RateLimiter(min_update_interval_seconds)
self._update("started")
def _update(self, status: str) -> None:
assert not self._finished
self._rate_limiter.reset()
logger.info("%s: %s", self._message, status)
def spin(self) -> None:
if self._finished:
return
if not self._rate_limiter.ready():
return
self._update("still running...")
def finish(self, final_status: str) -> None:
if self._finished:
return
self._update(f"finished with status '{final_status}'")
self._finished = True
class RateLimiter:
def __init__(self, min_update_interval_seconds: float) -> None:
self._min_update_interval_seconds = min_update_interval_seconds
self._last_update: float = 0
def ready(self) -> bool:
now = time.time()
delta = now - self._last_update
return delta >= self._min_update_interval_seconds
def reset(self) -> None:
self._last_update = time.time()
@contextlib.contextmanager
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
# Interactive spinner goes directly to sys.stdout rather than being routed
# through the logging system, but it acts like it has level INFO,
# i.e. it's only displayed if we're at level INFO or better.
# Non-interactive spinner goes through the logging system, so it is always
# in sync with logging configuration.
if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
spinner: SpinnerInterface = InteractiveSpinner(message)
else:
spinner = NonInteractiveSpinner(message)
try:
with hidden_cursor(sys.stdout):
yield spinner
except KeyboardInterrupt:
spinner.finish("canceled")
raise
except Exception:
spinner.finish("error")
raise
else:
spinner.finish("done")
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"
@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
# The Windows terminal does not support the hide/show cursor ANSI codes,
# even via colorama. So don't even try.
if WINDOWS:
yield
# We don't want to clutter the output with control characters if we're
# writing to a file, or if the user is running with --quiet.
# See https://github.com/pypa/pip/issues/3418
elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
yield
else:
file.write(HIDE_CURSOR)
try:
yield
finally:
file.write(SHOW_CURSOR)
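# --- Illustrative usage, not part of this diff --------------------------------
# How a long-running step elsewhere in pip could wrap itself with
# open_spinner(). The message is made up; the only APIs used are
# open_spinner() and SpinnerInterface.spin() from this module.
def _example_long_step() -> None:
    with open_spinner("Building wheel for examplepkg") as spinner:
        for _ in range(20):
            time.sleep(0.1)  # stand-in for real work
            spinner.spin()   # rate-limited internally, so cheap to call often
    # finish("done") is emitted automatically on a clean exit; an exception
    # triggers finish("error") (or "canceled" for KeyboardInterrupt) and re-raises.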

@ -0,0 +1,6 @@
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23

@ -0,0 +1,132 @@
"""
Package containing all pip commands
"""
import importlib
from collections import namedtuple
from typing import Any, Dict, Optional
from pip._internal.cli.base_command import Command
CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
# This dictionary does a bunch of heavy lifting for help output:
# - Enables avoiding additional (costly) imports for presenting `--help`.
# - The ordering matters for help display.
#
# Even though the module path starts with the same "pip._internal.commands"
# prefix, the full path makes testing easier (specifically when modifying
# `commands_dict` in test setup / teardown).
commands_dict: Dict[str, CommandInfo] = {
"install": CommandInfo(
"pip._internal.commands.install",
"InstallCommand",
"Install packages.",
),
"download": CommandInfo(
"pip._internal.commands.download",
"DownloadCommand",
"Download packages.",
),
"uninstall": CommandInfo(
"pip._internal.commands.uninstall",
"UninstallCommand",
"Uninstall packages.",
),
"freeze": CommandInfo(
"pip._internal.commands.freeze",
"FreezeCommand",
"Output installed packages in requirements format.",
),
"inspect": CommandInfo(
"pip._internal.commands.inspect",
"InspectCommand",
"Inspect the python environment.",
),
"list": CommandInfo(
"pip._internal.commands.list",
"ListCommand",
"List installed packages.",
),
"show": CommandInfo(
"pip._internal.commands.show",
"ShowCommand",
"Show information about installed packages.",
),
"check": CommandInfo(
"pip._internal.commands.check",
"CheckCommand",
"Verify installed packages have compatible dependencies.",
),
"config": CommandInfo(
"pip._internal.commands.configuration",
"ConfigurationCommand",
"Manage local and global configuration.",
),
"search": CommandInfo(
"pip._internal.commands.search",
"SearchCommand",
"Search PyPI for packages.",
),
"cache": CommandInfo(
"pip._internal.commands.cache",
"CacheCommand",
"Inspect and manage pip's wheel cache.",
),
"index": CommandInfo(
"pip._internal.commands.index",
"IndexCommand",
"Inspect information available from package indexes.",
),
"wheel": CommandInfo(
"pip._internal.commands.wheel",
"WheelCommand",
"Build wheels from your requirements.",
),
"hash": CommandInfo(
"pip._internal.commands.hash",
"HashCommand",
"Compute hashes of package archives.",
),
"completion": CommandInfo(
"pip._internal.commands.completion",
"CompletionCommand",
"A helper command used for command completion.",
),
"debug": CommandInfo(
"pip._internal.commands.debug",
"DebugCommand",
"Show information useful for debugging.",
),
"help": CommandInfo(
"pip._internal.commands.help",
"HelpCommand",
"Show help for commands.",
),
}
def create_command(name: str, **kwargs: Any) -> Command:
"""
Create an instance of the Command class with the given name.
"""
module_path, class_name, summary = commands_dict[name]
module = importlib.import_module(module_path)
command_class = getattr(module, class_name)
command = command_class(name=name, summary=summary, **kwargs)
return command
def get_similar_commands(name: str) -> Optional[str]:
"""Command name auto-correct."""
from difflib import get_close_matches
name = name.lower()
close_commands = get_close_matches(name, commands_dict.keys())
if close_commands:
return close_commands[0]
else:
return None
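# --- Illustrative usage, not part of this diff --------------------------------
# Roughly how pip's entry point resolves a command name: unknown names go
# through get_similar_commands() to build a suggestion, otherwise
# create_command() lazily imports the module and instantiates the class.
# The `isolated` kwarg shown is an assumption about a typical call.
def _resolve_command(name: str) -> Command:
    if name not in commands_dict:
        suggestion = get_similar_commands(name)
        hint = f' - maybe you meant "{suggestion}"?' if suggestion else ""
        raise KeyError(f"unknown command {name!r}{hint}")
    return create_command(name, isolated=False)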

@ -0,0 +1,222 @@
import os
import textwrap
from optparse import Values
from typing import Any, List
import pip._internal.utils.filesystem as filesystem
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils.logging import getLogger
logger = getLogger(__name__)
class CacheCommand(Command):
"""
Inspect and manage pip's wheel cache.
Subcommands:
- dir: Show the cache directory.
- info: Show information about the cache.
- list: List filenames of packages stored in the cache.
    - remove: Remove one or more packages from the cache.
- purge: Remove all items from the cache.
``<pattern>`` can be a glob expression or a package name.
"""
ignore_require_venv = True
usage = """
%prog dir
%prog info
%prog list [<pattern>] [--format=[human, abspath]]
%prog remove <pattern>
%prog purge
"""
def add_options(self) -> None:
self.cmd_opts.add_option(
"--format",
action="store",
dest="list_format",
default="human",
choices=("human", "abspath"),
help="Select the output format among: human (default) or abspath",
)
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
handlers = {
"dir": self.get_cache_dir,
"info": self.get_cache_info,
"list": self.list_cache_items,
"remove": self.remove_cache_items,
"purge": self.purge_cache,
}
if not options.cache_dir:
logger.error("pip cache commands can not function since cache is disabled.")
return ERROR
# Determine action
if not args or args[0] not in handlers:
logger.error(
"Need an action (%s) to perform.",
", ".join(sorted(handlers)),
)
return ERROR
action = args[0]
# Error handling happens here, not in the action-handlers.
try:
handlers[action](options, args[1:])
except PipError as e:
logger.error(e.args[0])
return ERROR
return SUCCESS
def get_cache_dir(self, options: Values, args: List[Any]) -> None:
if args:
raise CommandError("Too many arguments")
logger.info(options.cache_dir)
def get_cache_info(self, options: Values, args: List[Any]) -> None:
if args:
raise CommandError("Too many arguments")
num_http_files = len(self._find_http_files(options))
num_packages = len(self._find_wheels(options, "*"))
http_cache_location = self._cache_dir(options, "http")
wheels_cache_location = self._cache_dir(options, "wheels")
http_cache_size = filesystem.format_directory_size(http_cache_location)
wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
message = (
textwrap.dedent(
"""
Package index page cache location: {http_cache_location}
Package index page cache size: {http_cache_size}
Number of HTTP files: {num_http_files}
Locally built wheels location: {wheels_cache_location}
Locally built wheels size: {wheels_cache_size}
Number of locally built wheels: {package_count}
"""
)
.format(
http_cache_location=http_cache_location,
http_cache_size=http_cache_size,
num_http_files=num_http_files,
wheels_cache_location=wheels_cache_location,
package_count=num_packages,
wheels_cache_size=wheels_cache_size,
)
.strip()
)
logger.info(message)
def list_cache_items(self, options: Values, args: List[Any]) -> None:
if len(args) > 1:
raise CommandError("Too many arguments")
if args:
pattern = args[0]
else:
pattern = "*"
files = self._find_wheels(options, pattern)
if options.list_format == "human":
self.format_for_human(files)
else:
self.format_for_abspath(files)
def format_for_human(self, files: List[str]) -> None:
if not files:
logger.info("No locally built wheels cached.")
return
results = []
for filename in files:
wheel = os.path.basename(filename)
size = filesystem.format_file_size(filename)
results.append(f" - {wheel} ({size})")
logger.info("Cache contents:\n")
logger.info("\n".join(sorted(results)))
def format_for_abspath(self, files: List[str]) -> None:
if not files:
return
results = []
for filename in files:
results.append(filename)
logger.info("\n".join(sorted(results)))
def remove_cache_items(self, options: Values, args: List[Any]) -> None:
if len(args) > 1:
raise CommandError("Too many arguments")
if not args:
raise CommandError("Please provide a pattern")
files = self._find_wheels(options, args[0])
no_matching_msg = "No matching packages"
if args[0] == "*":
# Only fetch http files if no specific pattern given
files += self._find_http_files(options)
else:
# Add the pattern to the log message
no_matching_msg += ' for pattern "{}"'.format(args[0])
if not files:
logger.warning(no_matching_msg)
for filename in files:
os.unlink(filename)
logger.verbose("Removed %s", filename)
logger.info("Files removed: %s", len(files))
def purge_cache(self, options: Values, args: List[Any]) -> None:
if args:
raise CommandError("Too many arguments")
return self.remove_cache_items(options, ["*"])
def _cache_dir(self, options: Values, subdir: str) -> str:
return os.path.join(options.cache_dir, subdir)
def _find_http_files(self, options: Values) -> List[str]:
http_dir = self._cache_dir(options, "http")
return filesystem.find_files(http_dir, "*")
def _find_wheels(self, options: Values, pattern: str) -> List[str]:
wheel_dir = self._cache_dir(options, "wheels")
# The wheel filename format, as specified in PEP 427, is:
# {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
#
# Additionally, non-alphanumeric values in the distribution are
# normalized to underscores (_), meaning hyphens can never occur
# before `-{version}`.
#
# Given that information:
# - If the pattern we're given contains a hyphen (-), the user is
# providing at least the version. Thus, we can just append `*.whl`
# to match the rest of it.
# - If the pattern we're given doesn't contain a hyphen (-), the
# user is only providing the name. Thus, we append `-*.whl` to
# match the hyphen before the version, followed by anything else.
#
# PEP 427: https://www.python.org/dev/peps/pep-0427/
pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
return filesystem.find_files(wheel_dir, pattern)
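# --- Illustrative check, not part of this diff ---------------------------------
# The pattern expansion above, in isolation: a bare project name gets
# "-*.whl" appended so the hyphen before the version is matched, while a
# pattern that already contains a hyphen only needs the trailing "*.whl".
def _expand_wheel_pattern(pattern: str) -> str:
    return pattern + ("*.whl" if "-" in pattern else "-*.whl")

assert _expand_wheel_pattern("requests") == "requests-*.whl"
assert _expand_wheel_pattern("requests-2.31") == "requests-2.31*.whl"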
