// Copyright 2020 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

// +build !gogit

package git

import (
	"bufio"
	"bytes"
	"io"
	"math"
	"strings"

	"code.gitea.io/gitea/modules/analyze"

	"github.com/go-enry/go-enry/v2"
)

// GetLanguageStats calculates language stats for the git repository at the specified commit
func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, error) {
	// We will feed the commit IDs in order into cat-file --batch, followed by blobs as necessary.
	// so let's create a batch stdin and stdout
	batchStdinReader, batchStdinWriter := io.Pipe()
	batchStdoutReader, batchStdoutWriter := io.Pipe()
	defer func() {
		_ = batchStdinReader.Close()
		_ = batchStdinWriter.Close()
		_ = batchStdoutReader.Close()
		_ = batchStdoutWriter.Close()
	}()
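
	// Run `git cat-file --batch` against the repository in the background; it reads object IDs
	// from batchStdinReader and writes the corresponding objects to batchStdoutWriter until the
	// pipes are closed by the deferred Close calls above.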
	go func() {
		stderr := strings.Builder{}
		err := NewCommand("cat-file", "--batch").RunInDirFullPipeline(repo.Path, batchStdoutWriter, &stderr, batchStdinReader)
		if err != nil {
			_ = batchStdoutWriter.CloseWithError(ConcatenateError(err, (&stderr).String()))
			_ = batchStdinReader.CloseWithError(ConcatenateError(err, (&stderr).String()))
		} else {
			_ = batchStdoutWriter.Close()
			_ = batchStdinReader.Close()
		}
	}()

	// For simplicity's sake we'll use a buffered reader on the batch stdout
	batchReader := bufio.NewReader(batchStdoutReader)

	// writeID sends an object ID, terminated by a newline, to cat-file --batch
	writeID := func(id string) error {
		_, err := batchStdinWriter.Write([]byte(id))
		if err != nil {
			return err
		}
		_, err = batchStdinWriter.Write([]byte{'\n'})
		return err
	}
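
	// Request the commit object first; cat-file --batch answers with a "<sha> <type> <size>"
	// header line followed by the raw object content, which we parse below.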
	if err := writeID(commitID); err != nil {
		return nil, err
	}
	shaBytes, typ, size, err := ReadBatchLine(batchReader)
	if typ != "commit" {
		log("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, ErrNotExist{commitID, ""}
	}

	sha, err := NewIDFromString(string(shaBytes))
	if err != nil {
		log("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, ErrNotExist{commitID, ""}
	}

	commit, err := CommitFromReader(repo, sha, io.LimitReader(batchReader, size))
	if err != nil {
		log("Unable to get commit for: %s. Err: %v", commitID, err)
		return nil, err
	}

	tree := commit.Tree

	entries, err := tree.ListEntriesRecursive()
	if err != nil {
		return nil, err
	}
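
	// Walk every file in the tree, asking the batch for its blob when content-based detection
	// is needed, and accumulate the per-language byte counts in sizes.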
	contentBuf := bytes.Buffer{}
	var content []byte
	sizes := make(map[string]int64)
	for _, f := range entries {
		contentBuf.Reset()
		content = contentBuf.Bytes()
		if f.Size() == 0 || enry.IsVendor(f.Name()) || enry.IsDotFile(f.Name()) ||
			enry.IsDocumentation(f.Name()) || enry.IsConfiguration(f.Name()) {
			continue
		}

		// If the content can not be read or the file is too big, just do detection by filename
		if f.Size() <= bigFileSize {
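			// Request the blob from the batch and read its header to learn its size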
			if err := writeID(f.ID.String()); err != nil {
				return nil, err
			}
			_, _, size, err := ReadBatchLine(batchReader)
			if err != nil {
				log("Error reading blob: %s Err: %v", f.ID.String(), err)
				return nil, err
			}

			// Only read up to fileSizeLimit bytes for detection; anything beyond that is
			// discarded so the batch stream stays aligned for the next request
			sizeToRead := size
			discard := int64(0)
			if size > fileSizeLimit {
				sizeToRead = fileSizeLimit
				discard = size - fileSizeLimit
			}

			_, err = contentBuf.ReadFrom(io.LimitReader(batchReader, sizeToRead))
			if err != nil {
				return nil, err
			}
			content = contentBuf.Bytes()
			err = discardFull(batchReader, discard)
			if err != nil {
				return nil, err
			}
		}
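
		// content now holds at most fileSizeLimit bytes of the blob; for files larger than
		// bigFileSize it stays empty, so the checks below fall back to filename-only detection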
		if enry.IsGenerated(f.Name(), content) {
			continue
		}

		// TODO: Use .gitattributes file for linguist overrides

		// FIXME: Why can't we split this and the IsGenerated tests to avoid reading the blob unless absolutely necessary?
		// - eg. do all the detection tests using filename first before reading content.
		language := analyze.GetCodeLanguage(f.Name(), content)
		if language == enry.OtherLanguage || language == "" {
			continue
		}

		// group languages, such as Pug -> HTML; SCSS -> CSS
		group := enry.GetLanguageGroup(language)
		if group != "" {
			language = group
		}

		sizes[language] += f.Size()
		continue
	}

	// filter special languages unless they are the only language
	if len(sizes) > 1 {
		for language := range sizes {
			langtype := enry.GetLanguageType(language)
			if langtype != enry.Programming && langtype != enry.Markup {
				delete(sizes, language)
			}
		}
	}

	return sizes, nil
}
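
// discardFull throws away discard bytes from rd. bufio.Reader.Discard takes an int,
// so amounts larger than math.MaxInt32 are handled across more than one call.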
func discardFull(rd *bufio.Reader, discard int64) error {
	if discard > math.MaxInt32 {
		n, err := rd.Discard(math.MaxInt32)
		discard -= int64(n)
		if err != nil {
			return err
		}
	}
	for discard > 0 {
		n, err := rd.Discard(int(discard))
		discard -= int64(n)
		if err != nil {
			return err
		}
	}
	return nil
}