gitea/routers/repo/view.go

// Copyright 2017 The Gitea Authors. All rights reserved.
// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package repo
import (
"bytes"
"encoding/base64"
"fmt"
gotemplate "html/template"
"io/ioutil"
"net/url"
"path"
"strings"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/cache"
"code.gitea.io/gitea/modules/charset"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/highlight"
"code.gitea.io/gitea/modules/lfs"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/markup"
"code.gitea.io/gitea/modules/setting"
)
const (
tplRepoEMPTY base.TplName = "repo/empty"
tplRepoHome base.TplName = "repo/home"
tplWatchers base.TplName = "repo/watchers"
tplForks base.TplName = "repo/forks"
tplMigrating base.TplName = "repo/migrating"
)
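// namedBlob records a tree entry's display name, whether that entry is a
// symlink, and the blob whose contents should be rendered for it.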
type namedBlob struct {
name string
isSymlink bool
blob *git.Blob
}
// FIXME: There has to be a more efficient way of doing this
func getReadmeFileFromPath(commit *git.Commit, treePath string) (*namedBlob, error) {
tree, err := commit.SubTree(treePath)
if err != nil {
return nil, err
}
entries, err := tree.ListEntries()
if err != nil {
return nil, err
}
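// The first three slots track the best README candidate for each extension in
// exts (in priority order); the last slot is a fallback for a README that does
// not strictly match one of those extensions.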
var readmeFiles [4]*namedBlob
var exts = []string{".md", ".txt", ""} // sorted by priority
for _, entry := range entries {
if entry.IsDir() {
continue
}
for i, ext := range exts {
if markup.IsReadmeFile(entry.Name(), ext) {
if readmeFiles[i] == nil || base.NaturalSortLess(readmeFiles[i].name, entry.Blob().Name()) {
name := entry.Name()
isSymlink := entry.IsLink()
target := entry
if isSymlink {
target, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
return nil, err
}
}
if target != nil && (target.IsExecutable() || target.IsRegular()) {
readmeFiles[i] = &namedBlob{
name,
isSymlink,
target.Blob(),
}
}
}
}
}
if markup.IsReadmeFile(entry.Name()) {
if readmeFiles[3] == nil || base.NaturalSortLess(readmeFiles[3].name, entry.Blob().Name()) {
name := entry.Name()
isSymlink := entry.IsLink()
if isSymlink {
entry, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
return nil, err
}
}
if entry != nil && (entry.IsExecutable() || entry.IsRegular()) {
readmeFiles[3] = &namedBlob{
name,
isSymlink,
entry.Blob(),
}
}
}
}
}
var readmeFile *namedBlob
for _, f := range readmeFiles {
if f != nil {
readmeFile = f
break
}
}
return readmeFile, nil
}
func renderDirectory(ctx *context.Context, treeLink string) {
tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.SubTree", git.IsErrNotExist, err)
return
}
entries, err := tree.ListEntries()
if err != nil {
ctx.ServerError("ListEntries", err)
return
}
entries.CustomSort(base.NaturalSortLess)
var c git.LastCommitCache
if setting.CacheService.LastCommit.Enabled && ctx.Repo.CommitsCount >= setting.CacheService.LastCommit.CommitsCount {
c = cache.NewLastCommitCache(ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, int64(setting.CacheService.LastCommit.TTL.Seconds()))
}
var latestCommit *git.Commit
ctx.Data["Files"], latestCommit, err = entries.GetCommitsInfo(ctx.Repo.Commit, ctx.Repo.TreePath, c)
if err != nil {
ctx.ServerError("GetCommitsInfo", err)
return
}
// 3 for the extensions in exts[] in order
// the last one is for a readme that doesn't
// strictly match an extension
var readmeFiles [4]*namedBlob
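// docsEntries remembers the docs/, .gitea/ and .github/ directories so they
// can be searched for a README when the repository root has none of its own.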
var docsEntries [3]*git.TreeEntry
var exts = []string{".md", ".txt", ""} // sorted by priority
for _, entry := range entries {
if entry.IsDir() {
lowerName := strings.ToLower(entry.Name())
switch lowerName {
case "docs":
if entry.Name() == "docs" || docsEntries[0] == nil {
docsEntries[0] = entry
}
case ".gitea":
if entry.Name() == ".gitea" || docsEntries[1] == nil {
docsEntries[1] = entry
}
case ".github":
if entry.Name() == ".github" || docsEntries[2] == nil {
docsEntries[2] = entry
}
}
continue
}
for i, ext := range exts {
if markup.IsReadmeFile(entry.Name(), ext) {
log.Debug("%s", entry.Name())
name := entry.Name()
isSymlink := entry.IsLink()
target := entry
if isSymlink {
target, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
ctx.ServerError("FollowLinks", err)
return
}
}
log.Debug("%t", target == nil)
if target != nil && (target.IsExecutable() || target.IsRegular()) {
readmeFiles[i] = &namedBlob{
name,
isSymlink,
target.Blob(),
}
}
}
}
if markup.IsReadmeFile(entry.Name()) {
name := entry.Name()
isSymlink := entry.IsLink()
if isSymlink {
entry, err = entry.FollowLinks()
if err != nil && !git.IsErrBadLink(err) {
ctx.ServerError("FollowLinks", err)
return
}
}
if entry != nil && (entry.IsExecutable() || entry.IsRegular()) {
readmeFiles[3] = &namedBlob{
name,
isSymlink,
entry.Blob(),
}
}
}
}
var readmeFile *namedBlob
readmeTreelink := treeLink
for _, f := range readmeFiles {
if f != nil {
readmeFile = f
break
}
}
if ctx.Repo.TreePath == "" && readmeFile == nil {
for _, entry := range docsEntries {
if entry == nil {
continue
}
readmeFile, err = getReadmeFileFromPath(ctx.Repo.Commit, entry.GetSubJumpablePathName())
if err != nil {
ctx.ServerError("getReadmeFileFromPath", err)
return
}
if readmeFile != nil {
readmeFile.name = entry.Name() + "/" + readmeFile.name
readmeTreelink = treeLink + "/" + entry.GetSubJumpablePathName()
break
}
}
}
if readmeFile != nil {
ctx.Data["RawFileLink"] = ""
ctx.Data["ReadmeInList"] = true
ctx.Data["ReadmeExist"] = true
ctx.Data["FileIsSymlink"] = readmeFile.isSymlink
dataRc, err := readmeFile.blob.DataAsync()
if err != nil {
ctx.ServerError("Data", err)
return
}
defer dataRc.Close()
buf := make([]byte, 1024)
n, _ := dataRc.Read(buf)
buf = buf[:n]
isTextFile := base.IsTextFile(buf)
ctx.Data["FileIsText"] = isTextFile
ctx.Data["FileName"] = readmeFile.name
fileSize := int64(0)
isLFSFile := false
ctx.Data["IsLFSFile"] = false
// FIXME: what happens when README file is an image?
if isTextFile && setting.LFS.StartServer {
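// A README tracked by LFS shows up here as a pointer file; resolve the
// pointer to its meta object so the real content can be read from the
// LFS store below.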
meta := lfs.IsPointerFile(&buf)
if meta != nil {
meta, err = ctx.Repo.Repository.GetLFSMetaObjectByOid(meta.Oid)
if err != nil && err != models.ErrLFSObjectNotExist {
ctx.ServerError("GetLFSMetaObject", err)
return
}
}
if meta != nil {
ctx.Data["IsLFSFile"] = true
isLFSFile = true
// OK read the lfs object
var err error
dataRc, err = lfs.ReadMetaObject(meta)
if err != nil {
ctx.ServerError("ReadMetaObject", err)
return
}
defer dataRc.Close()
buf = make([]byte, 1024)
n, err = dataRc.Read(buf)
if err != nil {
ctx.ServerError("Data", err)
return
}
buf = buf[:n]
isTextFile = base.IsTextFile(buf)
ctx.Data["IsTextFile"] = isTextFile
fileSize = meta.Size
ctx.Data["FileSize"] = meta.Size
filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.name))
ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s.git/info/lfs/objects/%s/%s", setting.AppURL, ctx.Repo.Repository.FullName(), meta.Oid, filenameBase64)
}
}
if !isLFSFile {
fileSize = readmeFile.blob.Size()
}
if isTextFile {
if fileSize >= setting.UI.MaxDisplayFileSize {
// Pretend that this is a normal text file to display 'This file is too large to be shown'
ctx.Data["IsFileTooLarge"] = true
ctx.Data["IsTextFile"] = true
ctx.Data["FileSize"] = fileSize
} else {
d, _ := ioutil.ReadAll(dataRc)
buf = charset.ToUTF8WithFallback(append(buf, d...))
if markupType := markup.Type(readmeFile.name); markupType != "" {
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = string(markupType)
ctx.Data["FileContent"] = string(markup.Render(readmeFile.name, buf, readmeTreelink, ctx.Repo.Repository.ComposeDocumentMetas()))
} else {
ctx.Data["IsRenderedHTML"] = true
ctx.Data["FileContent"] = strings.Replace(
gotemplate.HTMLEscapeString(string(buf)), "\n", `<br>`, -1,
)
}
}
}
}
// Show the latest commit info of the repository in the table header,
// or of the directory if not in the root directory.
ctx.Data["LatestCommit"] = latestCommit
verification := models.ParseCommitWithSignature(latestCommit)
if err := models.CalculateTrustStatus(verification, ctx.Repo.Repository, nil); err != nil {
ctx.ServerError("CalculateTrustStatus", err)
return
}
ctx.Data["LatestCommitVerification"] = verification
ctx.Data["LatestCommitUser"] = models.ValidateCommitWithEmail(latestCommit)
statuses, err := models.GetLatestCommitStatus(ctx.Repo.Repository, ctx.Repo.Commit.ID.String(), 0)
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
}
ctx.Data["LatestCommitStatus"] = models.CalcCommitStatus(statuses)
// Check permission to add or upload new file.
if ctx.Repo.CanWrite(models.UnitTypeCode) && ctx.Repo.IsViewBranch {
ctx.Data["CanAddFile"] = !ctx.Repo.Repository.IsArchived
ctx.Data["CanUploadFile"] = setting.Repository.Upload.Enabled && !ctx.Repo.Repository.IsArchived
}
}
func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink string) {
ctx.Data["IsViewFile"] = true
blob := entry.Blob()
dataRc, err := blob.DataAsync()
if err != nil {
ctx.ServerError("DataAsync", err)
return
}
defer dataRc.Close()
ctx.Data["Title"] = ctx.Data["Title"].(string) + " - " + ctx.Repo.TreePath + " at " + ctx.Repo.BranchName
fileSize := blob.Size()
ctx.Data["FileIsSymlink"] = entry.IsLink()
ctx.Data["FileSize"] = fileSize
ctx.Data["FileName"] = blob.Name()
ctx.Data["HighlightClass"] = highlight.FileNameToHighlightClass(blob.Name())
ctx.Data["RawFileLink"] = rawLink + "/" + ctx.Repo.TreePath
buf := make([]byte, 1024)
n, _ := dataRc.Read(buf)
buf = buf[:n]
isTextFile := base.IsTextFile(buf)
isLFSFile := false
ctx.Data["IsTextFile"] = isTextFile
// Check for an LFS meta file
if isTextFile && setting.LFS.StartServer {
meta := lfs.IsPointerFile(&buf)
if meta != nil {
meta, err = ctx.Repo.Repository.GetLFSMetaObjectByOid(meta.Oid)
if err != nil && err != models.ErrLFSObjectNotExist {
ctx.ServerError("GetLFSMetaObject", err)
return
}
}
if meta != nil {
ctx.Data["IsLFSFile"] = true
isLFSFile = true
// OK read the lfs object
var err error
dataRc, err = lfs.ReadMetaObject(meta)
if err != nil {
ctx.ServerError("ReadMetaObject", err)
return
}
defer dataRc.Close()
buf = make([]byte, 1024)
n, err = dataRc.Read(buf)
if err != nil {
ctx.ServerError("Data", err)
return
}
buf = buf[:n]
isTextFile = base.IsTextFile(buf)
ctx.Data["IsTextFile"] = isTextFile
fileSize = meta.Size
ctx.Data["FileSize"] = meta.Size
filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(blob.Name()))
ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s.git/info/lfs/objects/%s/%s", setting.AppURL, ctx.Repo.Repository.FullName(), meta.Oid, filenameBase64)
}
}
// Check LFS Lock
lfsLock, err := ctx.Repo.Repository.GetTreePathLock(ctx.Repo.TreePath)
ctx.Data["LFSLock"] = lfsLock
if err != nil {
ctx.ServerError("GetTreePathLock", err)
return
}
if lfsLock != nil {
ctx.Data["LFSLockOwner"] = lfsLock.Owner.DisplayName()
ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
}
// Assume file is not editable first.
if isLFSFile {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
} else if !isTextFile {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
}
switch {
case isTextFile:
if fileSize >= setting.UI.MaxDisplayFileSize {
ctx.Data["IsFileTooLarge"] = true
break
}
d, _ := ioutil.ReadAll(dataRc)
buf = charset.ToUTF8WithFallback(append(buf, d...))
readmeExist := markup.IsReadmeFile(blob.Name())
ctx.Data["ReadmeExist"] = readmeExist
if markupType := markup.Type(blob.Name()); markupType != "" {
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
ctx.Data["FileContent"] = string(markup.Render(blob.Name(), buf, path.Dir(treeLink), ctx.Repo.Repository.ComposeDocumentMetas()))
} else if readmeExist {
ctx.Data["IsRenderedHTML"] = true
ctx.Data["FileContent"] = strings.Replace(
gotemplate.HTMLEscapeString(string(buf)), "\n", `<br>`, -1,
)
} else {
// Build code view blocks with line numbers on the server side.
var fileContent string
if content, err := charset.ToUTF8WithErr(buf); err != nil {
log.Error("ToUTF8WithErr: %v", err)
fileContent = string(buf)
} else {
fileContent = content
}
var output bytes.Buffer
lines := strings.Split(fileContent, "\n")
ctx.Data["NumLines"] = len(lines)
if len(lines) == 1 && lines[0] == "" {
// If the file is completely empty, we show zero lines at the line counter
ctx.Data["NumLines"] = 0
}
ctx.Data["NumLinesSet"] = true
// Remove the blank line at the end of the file
if len(lines) > 0 && lines[len(lines)-1] == "" {
lines = lines[:len(lines)-1]
}
for index, line := range lines {
line = gotemplate.HTMLEscapeString(line)
if index != len(lines)-1 {
line += "\n"
}
output.WriteString(fmt.Sprintf(`<li class="L%d" rel="L%d">%s</li>`, index+1, index+1, line))
}
ctx.Data["FileContent"] = gotemplate.HTML(output.String())
output.Reset()
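// Build the line number column: one span per line, each with an id that
// can be used as an anchor for linking to that line.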
for i := 0; i < len(lines); i++ {
output.WriteString(fmt.Sprintf(`<span id="L%[1]d" data-line-number="%[1]d"></span>`, i+1))
}
ctx.Data["LineNums"] = gotemplate.HTML(output.String())
}
if !isLFSFile {
if ctx.Repo.CanEnableEditor() {
if lfsLock != nil && lfsLock.OwnerID != ctx.User.ID {
ctx.Data["CanEditFile"] = false
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
} else {
ctx.Data["CanEditFile"] = true
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.edit_this_file")
}
} else if !ctx.Repo.IsViewBranch {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
} else if !ctx.Repo.CanWrite(models.UnitTypeCode) {
ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit")
}
}
case base.IsPDFFile(buf):
ctx.Data["IsPDFFile"] = true
case base.IsVideoFile(buf):
ctx.Data["IsVideoFile"] = true
case base.IsAudioFile(buf):
ctx.Data["IsAudioFile"] = true
case base.IsImageFile(buf):
ctx.Data["IsImageFile"] = true
default:
if fileSize >= setting.UI.MaxDisplayFileSize {
ctx.Data["IsFileTooLarge"] = true
break
}
if markupType := markup.Type(blob.Name()); markupType != "" {
d, _ := ioutil.ReadAll(dataRc)
buf = append(buf, d...)
ctx.Data["IsMarkup"] = true
ctx.Data["MarkupType"] = markupType
ctx.Data["FileContent"] = string(markup.Render(blob.Name(), buf, path.Dir(treeLink), ctx.Repo.Repository.ComposeDocumentMetas()))
}
}
if ctx.Repo.CanEnableEditor() {
if lfsLock != nil && lfsLock.OwnerID != ctx.User.ID {
ctx.Data["CanDeleteFile"] = false
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
} else {
ctx.Data["CanDeleteFile"] = true
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.delete_this_file")
}
} else if !ctx.Repo.IsViewBranch {
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
} else if !ctx.Repo.CanWrite(models.UnitTypeCode) {
ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access")
}
}
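// safeURL strips any userinfo (credentials) from the address before it is
// displayed; if the address cannot be parsed, it is returned unchanged.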
func safeURL(address string) string {
u, err := url.Parse(address)
if err != nil {
return address
}
u.User = nil
return u.String()
}
// Home renders the repository home page
func Home(ctx *context.Context) {
if len(ctx.Repo.Units) > 0 {
if ctx.Repo.Repository.IsBeingCreated() {
task, err := models.GetMigratingTask(ctx.Repo.Repository.ID)
if err != nil {
ctx.ServerError("models.GetMigratingTask", err)
return
}
cfg, err := task.MigrateConfig()
if err != nil {
ctx.ServerError("task.MigrateConfig", err)
return
}
ctx.Data["Repo"] = ctx.Repo
ctx.Data["MigrateTask"] = task
ctx.Data["CloneAddr"] = safeURL(cfg.CloneAddr)
ctx.HTML(200, tplMigrating)
return
}
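// Show the code unit directly if it is enabled; otherwise fall back to
// redirecting to the highest-priority unit enabled for this repository.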
var firstUnit *models.Unit
for _, repoUnit := range ctx.Repo.Units {
if repoUnit.Type == models.UnitTypeCode {
renderCode(ctx)
return
}
unit, ok := models.Units[repoUnit.Type]
if ok && (firstUnit == nil || !firstUnit.IsLessThan(unit)) {
firstUnit = &unit
}
}
if firstUnit != nil {
ctx.Redirect(fmt.Sprintf("%s/%s%s", setting.AppSubURL, ctx.Repo.Repository.FullName(), firstUnit.URI))
return
}
}
ctx.NotFound("Home", fmt.Errorf(ctx.Tr("units.error.no_unit_allowed_repo")))
}
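// renderLanguageStats loads the repository's top five languages into the
// template context.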
func renderLanguageStats(ctx *context.Context) {
langs, err := ctx.Repo.Repository.GetTopLanguageStats(5)
if err != nil {
ctx.ServerError("Repo.GetTopLanguageStats", err)
return
}
ctx.Data["LanguageStats"] = langs
}
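// renderRepoTopics loads the repository's topics into the template context.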
func renderRepoTopics(ctx *context.Context) {
topics, err := models.FindTopics(&models.FindTopicOptions{
RepoID: ctx.Repo.Repository.ID,
})
if err != nil {
ctx.ServerError("models.FindTopics", err)
return
}
ctx.Data["Topics"] = topics
}
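// renderCode renders the code view for the current tree path: a directory
// listing or a single file, together with the breadcrumb (path) data.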
func renderCode(ctx *context.Context) {
ctx.Data["PageIsViewCode"] = true
if ctx.Repo.Repository.IsEmpty {
ctx.HTML(200, tplRepoEMPTY)
return
}
title := ctx.Repo.Repository.Owner.Name + "/" + ctx.Repo.Repository.Name
if len(ctx.Repo.Repository.Description) > 0 {
title += ": " + ctx.Repo.Repository.Description
}
ctx.Data["Title"] = title
ctx.Data["RequireHighlightJS"] = true
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
treeLink := branchLink
rawLink := ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL()
if len(ctx.Repo.TreePath) > 0 {
treeLink += "/" + ctx.Repo.TreePath
}
// Get Topics of this repo
renderRepoTopics(ctx)
if ctx.Written() {
return
}
// Get the entry the user is currently looking at.
entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
if err != nil {
ctx.NotFoundOrServerError("Repo.Commit.GetTreeEntryByPath", git.IsErrNotExist, err)
return
}
renderLanguageStats(ctx)
if ctx.Written() {
return
}
if entry.IsDir() {
renderDirectory(ctx, treeLink)
} else {
renderFile(ctx, entry, treeLink, rawLink)
}
if ctx.Written() {
return
}
var treeNames []string
paths := make([]string, 0, 5)
if len(ctx.Repo.TreePath) > 0 {
treeNames = strings.Split(ctx.Repo.TreePath, "/")
for i := range treeNames {
paths = append(paths, strings.Join(treeNames[:i+1], "/"))
}
ctx.Data["HasParentPath"] = true
if len(paths)-2 >= 0 {
ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
}
}
ctx.Data["Paths"] = paths
ctx.Data["TreeLink"] = treeLink
ctx.Data["TreeNames"] = treeNames
ctx.Data["BranchLink"] = branchLink
ctx.HTML(200, tplRepoHome)
}
// RenderUserCards renders a page that shows users according to the given template
func RenderUserCards(ctx *context.Context, total int, getter func(opts models.ListOptions) ([]*models.User, error), tpl base.TplName) {
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pager := context.NewPagination(total, models.ItemsPerPage, page, 5)
ctx.Data["Page"] = pager
items, err := getter(models.ListOptions{Page: pager.Paginater.Current()})
if err != nil {
ctx.ServerError("getter", err)
return
}
ctx.Data["Cards"] = items
ctx.HTML(200, tpl)
}
// Watchers renders the list of users watching the repository
func Watchers(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.watchers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.watchers")
ctx.Data["PageIsWatchers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumWatches, ctx.Repo.Repository.GetWatchers, tplWatchers)
}
// Stars renders the list of users who have starred the repository
func Stars(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.stargazers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.stargazers")
ctx.Data["PageIsStargazers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumStars, ctx.Repo.Repository.GetStargazers, tplWatchers)
}
// Forks renders the list of forks of the repository
func Forks(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repos.forks")
forks, err := ctx.Repo.Repository.GetForks(models.ListOptions{})
if err != nil {
ctx.ServerError("GetForks", err)
return
}
for _, fork := range forks {
if err = fork.GetOwner(); err != nil {
ctx.ServerError("GetOwner", err)
return
}
}
ctx.Data["Forks"] = forks
ctx.HTML(200, tplForks)
}