How to serialize SqlAlchemy result to JSON? [python]

How to serialize SqlAlchemy result to JSON?


You could just output your object as a dictionary:

class User:
    def as_dict(self):
        """Return a dict mapping each mapped column's name to its value on this row."""
        columns = self.__table__.columns
        return dict((column.name, getattr(self, column.name)) for column in columns)

And then you call as_dict() on an instance (e.g. user.as_dict()) to serialize your object.

As explained in Convert sqlalchemy row object to python dict


A flat implementation

You could use something like this:

from sqlalchemy.ext.declarative import DeclarativeMeta


class AlchemyEncoder(json.JSONEncoder):
    """JSON encoder that flattens SQLAlchemy model instances.

    Every public, JSON-encodable attribute is included; any value that
    cannot be encoded (related model objects, sets, ...) is replaced with
    None. Relationships are deliberately NOT expanded, which avoids
    infinite recursion on self-referencing models.
    """

    def default(self, obj):
        if isinstance(obj.__class__, DeclarativeMeta):
            # An SQLAlchemy mapped instance: collect its public attributes.
            fields = {}
            for field in (x for x in dir(obj) if not x.startswith('_') and x != 'metadata'):
                data = getattr(obj, field)
                try:
                    # Probe encodability; json.dumps raises TypeError for
                    # non-encodable values such as other model classes.
                    json.dumps(data)
                    fields[field] = data
                except TypeError:
                    fields[field] = None
            # A json-encodable dict.
            return fields
        # Not a mapped object: defer to the base class (raises TypeError).
        return json.JSONEncoder.default(self, obj)

and then convert to JSON using:

# Python 3: print is a function, not a statement.
c = YourAlchemyClass()
print(json.dumps(c, cls=AlchemyEncoder))

It will ignore fields that are not encodable (setting them to None).

It doesn't auto-expand relations (since this could lead to self-references, and loop forever).

A recursive, non-circular implementation

If, however, you'd rather expand relationships recursively (with visited-object tracking to avoid looping forever), you could use:

from sqlalchemy.ext.declarative import DeclarativeMeta


def new_alchemy_encoder():
    """Build a JSONEncoder subclass that expands relationships recursively.

    A fresh encoder class (with its own visited-object list) is returned on
    each call, so every json.dumps() run starts with clean cycle tracking.
    An object seen earlier in the same run is encoded as None to break
    reference cycles; pass check_circular=False to json.dumps when using it.
    """
    _visited_objs = []

    class AlchemyEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj.__class__, DeclarativeMeta):
                if obj in _visited_objs:
                    # Already serialized somewhere up the stack: stop here.
                    return None
                _visited_objs.append(obj)
                # Collect every public attribute; related model objects are
                # serialized by recursive calls back into this encoder.
                return {
                    field: getattr(obj, field)
                    for field in dir(obj)
                    if not field.startswith('_') and field != 'metadata'
                }
            return json.JSONEncoder.default(self, obj)

    return AlchemyEncoder

And then encode objects using:

# Python 3: print is a function, not a statement.
print(json.dumps(e, cls=new_alchemy_encoder(), check_circular=False))

This would encode all children, and all their children, and all their children... potentially your entire database, basically. When it reaches something it has encoded before, it will encode it as None.

A recursive, possibly-circular, selective implementation

Another alternative, probably better, is to be able to specify the fields you want to expand:

def new_alchemy_encoder(revisit_self=False, fields_to_expand=None):
    """Build a JSONEncoder subclass that expands only selected relationships.

    Args:
        revisit_self: when False (default), an object already serialized in
            this run is encoded as None, breaking reference cycles.
        fields_to_expand: names of relationship fields to expand; any other
            SQLAlchemy-typed field is encoded as None.
    """
    # Avoid a mutable default argument; an empty list means "expand nothing".
    if fields_to_expand is None:
        fields_to_expand = []
    _visited_objs = []

    class AlchemyEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj.__class__, DeclarativeMeta):
                # Cycle protection. BUGFIX: the original gated this on
                # `if revisit_self:` -- inverted, so the default
                # revisit_self=False disabled the guard entirely and allowed
                # infinite recursion on circular relationships.
                if not revisit_self:
                    if obj in _visited_objs:
                        return None
                    _visited_objs.append(obj)
                # Go through each public field of this SQLAlchemy class.
                fields = {}
                for field in dir(obj):
                    if field.startswith('_') or field == 'metadata':
                        continue
                    val = getattr(obj, field)
                    # Is this field another SQLAlchemy object, or a
                    # non-empty list of SQLAlchemy objects?
                    is_model = isinstance(val.__class__, DeclarativeMeta)
                    is_model_list = (isinstance(val, list) and len(val) > 0
                                     and isinstance(val[0].__class__, DeclarativeMeta))
                    if (is_model or is_model_list) and field not in fields_to_expand:
                        # Not selected for expansion: stop recursion here.
                        fields[field] = None
                        continue
                    fields[field] = val
                # A json-encodable dict.
                return fields
            return json.JSONEncoder.default(self, obj)

    return AlchemyEncoder

You can now call it with:

# Python 3: print is a function, not a statement.
print(json.dumps(e, cls=new_alchemy_encoder(False, ['parents']), check_circular=False))

To only expand SQLAlchemy fields called 'parents', for example.


Python 3.7+ and Flask 1.1+ can use the built-in dataclasses package

from dataclasses import dataclass
from datetime import datetime
from flask import Flask, jsonify
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
db = SQLAlchemy(app)


@dataclass
class User(db.Model):
    """User model; the dataclass annotations are what make jsonify() work."""
    # Only annotated names become dataclass fields, i.e. appear in the JSON.
    id: int
    email: str

    # BUGFIX: the Column keyword is `autoincrement`, not `auto_increment` --
    # the misspelled kwarg makes SQLAlchemy's Column() raise an error.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    email = db.Column(db.String(200), unique=True)


@app.route('/users/')
def users():
    # Renamed the local so it doesn't shadow the view function's own name.
    all_users = User.query.all()
    return jsonify(all_users)


if __name__ == "__main__":
    seed_users = User(email="user1@gmail.com"), User(email="user2@gmail.com")
    db.create_all()
    db.session.add_all(seed_users)
    db.session.commit()
    app.run()

The /users/ route will now return a list of users.

[  {"email": "user1@gmail.com", "id": 1},  {"email": "user2@gmail.com", "id": 2}]

Auto-serialize related models

@dataclass
class Account(db.Model):
    """Account whose related users are serialized inline by jsonify()."""
    id: int
    # The relationship yields a list of User rows, so annotate it as a list
    # (the original `users: User` annotation mis-described the value).
    users: list

    # BUGFIX: a mapped model must have a primary key; the original bare
    # db.Column(db.Integer) made SQLAlchemy reject the mapping.
    id = db.Column(db.Integer, primary_key=True)
    users = db.relationship(User)  # User model would need a db.ForeignKey field

The response from jsonify(account) would be this.

{
  "id": 1,
  "users": [
    {"email": "user1@gmail.com", "id": 1},
    {"email": "user2@gmail.com", "id": 2}
  ]
}

Overwrite the default JSON Encoder

import datetime

from flask.json import JSONEncoder


class CustomJSONEncoder(JSONEncoder):
    """Add support for serializing timedelta and datetime values."""

    # BUGFIX: the original signature was `default(o)` -- missing `self` --
    # so every call from the encoder raised TypeError instead of serializing.
    def default(self, o):
        # BUGFIX: `import datetime` is required here; under the earlier
        # `from datetime import datetime`, `datetime.timedelta` would raise
        # AttributeError. Also use isinstance() rather than type() ==.
        if isinstance(o, datetime.timedelta):
            return str(o)
        if isinstance(o, datetime.datetime):
            return o.isoformat()
        return super().default(o)


app.json_encoder = CustomJSONEncoder